From ee3cc11536213fd003d6ecd89254f4187dbca13a Mon Sep 17 00:00:00 2001 From: "A. Unique TensorFlower" Date: Mon, 15 Jun 2020 00:13:09 -0700 Subject: [PATCH 01/79] Internal change. PiperOrigin-RevId: 316409253 --- official/core/base_task.py | 43 +++++++++++++++++++++++++++++++++----- 1 file changed, 38 insertions(+), 5 deletions(-) diff --git a/official/core/base_task.py b/official/core/base_task.py index eed53f91e..2d02afbfa 100644 --- a/official/core/base_task.py +++ b/official/core/base_task.py @@ -250,11 +250,44 @@ _REGISTERED_TASK_CLS = {} # TODO(b/158268740): Move these outside the base class file. -def register_task_cls(task_config: cfg.TaskConfig) -> Task: - """Register ExperimentConfig factory method.""" - return registry.register(_REGISTERED_TASK_CLS, task_config) +# TODO(b/158741360): Add type annotations once pytype checks across modules. +def register_task_cls(task_config_cls): + """Decorates a factory of Tasks for lookup by a subclass of TaskConfig. + This decorator supports registration of tasks as follows: -def get_task_cls(task_config: cfg.TaskConfig) -> Task: - task_cls = registry.lookup(_REGISTERED_TASK_CLS, task_config) + ``` + @dataclasses.dataclass + class MyTaskConfig(TaskConfig): + # Add fields here. + pass + + @register_task_cls(MyTaskConfig) + class MyTask(Task): + # Inherits def __init__(self, task_config). + pass + + my_task_config = MyTaskConfig() + my_task = get_task(my_task_config) # Returns MyTask(my_task_config). + ``` + + Besisdes a class itself, other callables that create a Task from a TaskConfig + can be decorated by the result of this function, as long as there is at most + one registration for each config class. + + Args: + task_config_cls: a subclass of TaskConfig (*not* an instance of TaskConfig). + Each task_config_cls can only be used for a single registration. + + Returns: + A callable for use as class decorator that registers the decorated class + for creation from an instance of task_config_cls. + """ + return registry.register(_REGISTERED_TASK_CLS, task_config_cls) + + +# The user-visible get_task() is defined after classes have been registered. +# TODO(b/158741360): Add type annotations once pytype checks across modules. +def get_task_cls(task_config_cls): + task_cls = registry.lookup(_REGISTERED_TASK_CLS, task_config_cls) return task_cls -- GitLab From 57c08e2f98849a92311b17c7e1d670bdd7ecc396 Mon Sep 17 00:00:00 2001 From: "A. Unique TensorFlower" Date: Mon, 15 Jun 2020 11:54:05 -0700 Subject: [PATCH 02/79] Make function argument names consistent in core.base_task.Task PiperOrigin-RevId: 316513485 --- official/core/base_task.py | 22 ++++++++++----------- official/nlp/tasks/masked_lm.py | 24 +++++++++++------------ official/nlp/tasks/sentence_prediction.py | 11 +++++------ 3 files changed, 28 insertions(+), 29 deletions(-) diff --git a/official/core/base_task.py b/official/core/base_task.py index 2d02afbfa..3dff9e087 100644 --- a/official/core/base_task.py +++ b/official/core/base_task.py @@ -114,18 +114,18 @@ class Task(tf.Module): """ pass - def build_losses(self, features, model_outputs, aux_losses=None) -> tf.Tensor: + def build_losses(self, labels, model_outputs, aux_losses=None) -> tf.Tensor: """Standard interface to compute losses. Args: - features: optional feature/labels tensors. + labels: optional label tensors. model_outputs: a nested structure of output tensors. aux_losses: auxiliarly loss tensors, i.e. `losses` in keras.Model. Returns: The total loss tensor. 
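The `registry.register` / `registry.lookup` helpers that `register_task_cls` and `get_task_cls` delegate to in the first patch live in official/core/registry.py and are not shown here. As a minimal sketch of that class-keyed registry pattern (names and error handling below are assumptions for illustration, not the real official.core.registry code):

```
# Illustrative stand-in for the registry helpers used by register_task_cls /
# get_task_cls; the actual official.core.registry implementation may differ.
def register(registered_collection, reg_key):
  """Returns a decorator that records the decorated callable under reg_key."""

  def decorator(fn_or_cls):
    if reg_key in registered_collection:
      raise KeyError('%s is already registered.' % reg_key)
    registered_collection[reg_key] = fn_or_cls
    return fn_or_cls

  return decorator


def lookup(registered_collection, reg_key):
  """Returns the callable previously registered under reg_key."""
  if reg_key not in registered_collection:
    raise KeyError('%s has not been registered.' % reg_key)
  return registered_collection[reg_key]
```

With helpers of this shape, `@register_task_cls(MyTaskConfig)` stores the decorated class keyed by the config class, and `get_task_cls(MyTaskConfig)` returns it for instantiation.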
""" - del model_outputs, features + del model_outputs, labels if aux_losses is None: losses = [tf.constant(0.0, dtype=tf.float32)] @@ -139,29 +139,29 @@ class Task(tf.Module): del training return [] - def process_metrics(self, metrics, labels, outputs): + def process_metrics(self, metrics, labels, model_outputs): """Process and update metrics. Called when using custom training loop API. Args: metrics: a nested structure of metrics objects. The return of function self.build_metrics. labels: a tensor or a nested structure of tensors. - outputs: a tensor or a nested structure of tensors. + model_outputs: a tensor or a nested structure of tensors. For example, output of the keras model built by self.build_model. """ for metric in metrics: - metric.update_state(labels, outputs) + metric.update_state(labels, model_outputs) - def process_compiled_metrics(self, compiled_metrics, labels, outputs): + def process_compiled_metrics(self, compiled_metrics, labels, model_outputs): """Process and update compiled_metrics. call when using compile/fit API. Args: compiled_metrics: the compiled metrics (model.compiled_metrics). labels: a tensor or a nested structure of tensors. - outputs: a tensor or a nested structure of tensors. + model_outputs: a tensor or a nested structure of tensors. For example, output of the keras model built by self.build_model. """ - compiled_metrics.update_state(labels, outputs) + compiled_metrics.update_state(labels, model_outputs) def train_step(self, inputs, @@ -187,7 +187,7 @@ class Task(tf.Module): outputs = model(features, training=True) # Computes per-replica loss. loss = self.build_losses( - features=labels, model_outputs=outputs, aux_losses=model.losses) + labels=labels, model_outputs=outputs, aux_losses=model.losses) # Scales loss as the default gradients allreduce performs sum inside the # optimizer. scaled_loss = loss / tf.distribute.get_strategy().num_replicas_in_sync @@ -231,7 +231,7 @@ class Task(tf.Module): features, labels = inputs, inputs outputs = self.inference_step(features, model) loss = self.build_losses( - features=labels, model_outputs=outputs, aux_losses=model.losses) + labels=labels, model_outputs=outputs, aux_losses=model.losses) logs = {self.loss: loss} if metrics: self.process_metrics(metrics, labels, outputs) diff --git a/official/nlp/tasks/masked_lm.py b/official/nlp/tasks/masked_lm.py index 2445ae1f1..1679c09a0 100644 --- a/official/nlp/tasks/masked_lm.py +++ b/official/nlp/tasks/masked_lm.py @@ -43,25 +43,25 @@ class MaskedLMTask(base_task.Task): return bert.instantiate_from_cfg(self.task_config.network) def build_losses(self, - features, + labels, model_outputs, metrics, aux_losses=None) -> tf.Tensor: metrics = dict([(metric.name, metric) for metric in metrics]) lm_output = tf.nn.log_softmax(model_outputs['lm_output'], axis=-1) mlm_loss = loss_lib.weighted_sparse_categorical_crossentropy_loss( - labels=features['masked_lm_ids'], + labels=labels['masked_lm_ids'], predictions=lm_output, - weights=features['masked_lm_weights']) + weights=labels['masked_lm_weights']) metrics['lm_example_loss'].update_state(mlm_loss) - if 'next_sentence_labels' in features: + if 'next_sentence_labels' in labels: policy = tf.keras.mixed_precision.experimental.global_policy() if policy.name == 'mixed_bfloat16': # b/158514794: bf16 is not stable. 
policy = tf.float32 predictions = tf.keras.layers.Activation( tf.nn.log_softmax, dtype=policy)(model_outputs['next_sentence']) - sentence_labels = features['next_sentence_labels'] + sentence_labels = labels['next_sentence_labels'] sentence_loss = loss_lib.weighted_sparse_categorical_crossentropy_loss( labels=sentence_labels, predictions=predictions) @@ -112,15 +112,15 @@ class MaskedLMTask(base_task.Task): metrics.append(tf.keras.metrics.Mean(name='next_sentence_loss')) return metrics - def process_metrics(self, metrics, inputs, outputs): + def process_metrics(self, metrics, labels, model_outputs): metrics = dict([(metric.name, metric) for metric in metrics]) if 'masked_lm_accuracy' in metrics: - metrics['masked_lm_accuracy'].update_state(inputs['masked_lm_ids'], - outputs['lm_output'], - inputs['masked_lm_weights']) + metrics['masked_lm_accuracy'].update_state(labels['masked_lm_ids'], + model_outputs['lm_output'], + labels['masked_lm_weights']) if 'next_sentence_accuracy' in metrics: metrics['next_sentence_accuracy'].update_state( - inputs['next_sentence_labels'], outputs['next_sentence']) + labels['next_sentence_labels'], model_outputs['next_sentence']) def train_step(self, inputs, model: tf.keras.Model, optimizer: tf.keras.optimizers.Optimizer, metrics): @@ -139,7 +139,7 @@ class MaskedLMTask(base_task.Task): outputs = model(inputs, training=True) # Computes per-replica loss. loss = self.build_losses( - features=inputs, + labels=inputs, model_outputs=outputs, metrics=metrics, aux_losses=model.losses) @@ -166,7 +166,7 @@ class MaskedLMTask(base_task.Task): """ outputs = self.inference_step(inputs, model) loss = self.build_losses( - features=inputs, + labels=inputs, model_outputs=outputs, metrics=metrics, aux_losses=model.losses) diff --git a/official/nlp/tasks/sentence_prediction.py b/official/nlp/tasks/sentence_prediction.py index b17f645a3..a7d9193b1 100644 --- a/official/nlp/tasks/sentence_prediction.py +++ b/official/nlp/tasks/sentence_prediction.py @@ -79,8 +79,7 @@ class SentencePredictionTask(base_task.Task): else: return bert.instantiate_from_cfg(self.task_config.network) - def build_losses(self, features, model_outputs, aux_losses=None) -> tf.Tensor: - labels = features + def build_losses(self, labels, model_outputs, aux_losses=None) -> tf.Tensor: loss = loss_lib.weighted_sparse_categorical_crossentropy_loss( labels=labels, predictions=tf.nn.log_softmax(model_outputs['sentence_prediction'], @@ -118,12 +117,12 @@ class SentencePredictionTask(base_task.Task): ] return metrics - def process_metrics(self, metrics, labels, outputs): + def process_metrics(self, metrics, labels, model_outputs): for metric in metrics: - metric.update_state(labels, outputs['sentence_prediction']) + metric.update_state(labels, model_outputs['sentence_prediction']) - def process_compiled_metrics(self, compiled_metrics, labels, outputs): - compiled_metrics.update_state(labels, outputs['sentence_prediction']) + def process_compiled_metrics(self, compiled_metrics, labels, model_outputs): + compiled_metrics.update_state(labels, model_outputs['sentence_prediction']) def initialize(self, model): """Load a pretrained checkpoint (if exists) and then train from iter 0.""" -- GitLab From 48b6d1d11bd727f8a68c52b2662bcad65fb38aa7 Mon Sep 17 00:00:00 2001 From: Dan Anghel Date: Mon, 15 Jun 2020 14:56:27 -0700 Subject: [PATCH 03/79] Push to Github of TF2 changes to DELF package (#8678) * First version of working script to download the GLDv2 dataset * First version of the DEFL package installation script * First working 
version of the DELF package installation script * Fixed feedback from PR review * Push to Github of changes to the TFRecord data generation script for DELF. * Merged commit includes the following changes: 315363544 by Andre Araujo: Added the generation of TRAIN and VALIDATE splits from the train dataset. -- 314676530 by Andre Araujo: Updated script to download GLDv2 images for DELF training. -- 314101235 by Andre Araujo: Added newly created module 'utils' to the copybara script. -- 313677085 by Andre Araujo: Code migration from TF1 to TF2 for: - logging (replaced usage of tf.compat.v1.logging.info) - testing directories (replaced usage of tf.compat.v1.test.get_temp_dir()) - feature/object extraction scripts (replaced usage of tf.compat.v1.train.string_input_producer and tf.compat.v1.train.start_queue_runners with PIL) -- 312770828 by Andre Araujo: Internal change. -- PiperOrigin-RevId: 315363544 * First version of the updated README of the DELF training instructions * Added to the README the section describing the generation of the training data * Added warning about the TFRecord generation time * Updated the launch of the training * Minor README update * Integrated review feedback * Merged commit includes the following changes: 315971979 by Andre Araujo: Performance optimization in generating the TRAIN and VALIDATION splits per label. -- 315578370 by Andre Araujo: Tiny fix to char limit in extractor.py. -- 315546242 by Andre Araujo: Script to measure DELG latency. -- 315545801 by Andre Araujo: Pre-load PCA parameters, if using them when extracting DELF/G features. -- 315450392 by Andre Araujo: Code migration from TF1 to TF2 for: - loading the models using in extractor.py and detector.py using tf.saved_model.load - removed tf.compat.v1.Session for the extractor and detector model usage -- 315406342 by Andre Araujo: Internal change. -- PiperOrigin-RevId: 315971979 * Merged commit includes the following changes: 316538447 by Andre Araujo: Read the number of classes from the GLDv2 dataset metadata. 
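Among the migration items listed in this commit message is replacing `tf.compat.v1.Session`-based usage of the extractor and detector models with `tf.saved_model.load`. A small self-contained illustration of that TF2 loading pattern (the `Doubler` module and export path are invented for the example, not part of DELF):

```
import tensorflow as tf


class Doubler(tf.Module):
  """Tiny exportable module used only to demonstrate tf.saved_model.load."""

  @tf.function(input_signature=[tf.TensorSpec([None], tf.float32)])
  def double(self, x):
    return 2.0 * x


export_dir = '/tmp/doubler_saved_model'
tf.saved_model.save(Doubler(), export_dir)

# The reloaded object's functions are called eagerly -- no Session, Graph,
# or feed_dict, which is the pattern this migration adopts.
reloaded = tf.saved_model.load(export_dir)
print(reloaded.double(tf.constant([1.0, 2.0])).numpy())  # [2. 4.]
```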
-- 316416973 by Andre Araujo: Migration of DELF code to TF2: - removed tf.compat.v1.test.get_temp_dir() with FLAGS.test_tmpdir - removed delf_v1.py and its dependencies - removed tf.compat.v1, Session, Graph dependencies from feature_extractor.py, feature_aggregation_extractor.py and aggregation_extraction.py -- PiperOrigin-RevId: 316538447 Co-authored-by: Andre Araujo --- research/delf/delf/python/box_io_test.py | 9 +- research/delf/delf/python/datum_io_test.py | 9 +- research/delf/delf/python/delf_v1.py | 397 ------------------ .../aggregation_extraction.py | 135 +++--- .../python/feature_aggregation_extractor.py | 130 +++--- .../feature_aggregation_extractor_test.py | 148 +++---- .../delf/delf/python/feature_extractor.py | 246 +---------- .../delf/python/feature_extractor_test.py | 67 +-- research/delf/delf/python/feature_io_test.py | 9 +- .../training/datasets/googlelandmarks.py | 18 + research/delf/delf/python/training/train.py | 10 +- 11 files changed, 256 insertions(+), 922 deletions(-) delete mode 100644 research/delf/delf/python/delf_v1.py diff --git a/research/delf/delf/python/box_io_test.py b/research/delf/delf/python/box_io_test.py index b733fc9cc..c659185da 100644 --- a/research/delf/delf/python/box_io_test.py +++ b/research/delf/delf/python/box_io_test.py @@ -20,11 +20,14 @@ from __future__ import print_function import os +from absl import flags import numpy as np import tensorflow as tf from delf import box_io +FLAGS = flags.FLAGS + class BoxesIoTest(tf.test.TestCase): @@ -57,8 +60,7 @@ class BoxesIoTest(tf.test.TestCase): def testWriteAndReadToFile(self): boxes, scores, class_indices = self._create_data() - tmpdir = tf.compat.v1.test.get_temp_dir() - filename = os.path.join(tmpdir, 'test.boxes') + filename = os.path.join(FLAGS.test_tmpdir, 'test.boxes') box_io.WriteToFile(filename, boxes, scores, class_indices) data_read = box_io.ReadFromFile(filename) @@ -67,8 +69,7 @@ class BoxesIoTest(tf.test.TestCase): self.assertAllEqual(class_indices, data_read[2]) def testWriteAndReadToFileEmptyFile(self): - tmpdir = tf.compat.v1.test.get_temp_dir() - filename = os.path.join(tmpdir, 'test.box') + filename = os.path.join(FLAGS.test_tmpdir, 'test.box') box_io.WriteToFile(filename, np.array([]), np.array([]), np.array([])) data_read = box_io.ReadFromFile(filename) diff --git a/research/delf/delf/python/datum_io_test.py b/research/delf/delf/python/datum_io_test.py index 00a94936f..f3587a100 100644 --- a/research/delf/delf/python/datum_io_test.py +++ b/research/delf/delf/python/datum_io_test.py @@ -20,11 +20,14 @@ from __future__ import print_function import os +from absl import flags import numpy as np import tensorflow as tf from delf import datum_io +FLAGS = flags.FLAGS + class DatumIoTest(tf.test.TestCase): @@ -69,8 +72,7 @@ class DatumIoTest(tf.test.TestCase): def testWriteAndReadToFile(self): data = np.array([[[-1.0, 125.0, -2.5], [14.5, 3.5, 0.0]], [[20.0, 0.0, 30.0], [25.5, 36.0, 42.0]]]) - tmpdir = tf.compat.v1.test.get_temp_dir() - filename = os.path.join(tmpdir, 'test.datum') + filename = os.path.join(FLAGS.test_tmpdir, 'test.datum') datum_io.WriteToFile(data, filename) data_read = datum_io.ReadFromFile(filename) self.assertAllEqual(data_read, data) @@ -84,8 +86,7 @@ class DatumIoTest(tf.test.TestCase): data_2 = np.array( [[[255, 0, 5], [10, 300, 0]], [[20, 1, 100], [255, 360, 420]]], dtype='uint32') - tmpdir = tf.compat.v1.test.get_temp_dir() - filename = os.path.join(tmpdir, 'test.datum_pair') + filename = os.path.join(FLAGS.test_tmpdir, 'test.datum_pair') 
datum_io.WritePairToFile(data_1, data_2, filename) data_read_1, data_read_2 = datum_io.ReadPairFromFile(filename) self.assertAllEqual(data_read_1, data_1) diff --git a/research/delf/delf/python/delf_v1.py b/research/delf/delf/python/delf_v1.py deleted file mode 100644 index 4fdfcaeb7..000000000 --- a/research/delf/delf/python/delf_v1.py +++ /dev/null @@ -1,397 +0,0 @@ -# Copyright 2017 The TensorFlow Authors All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""DELF model implementation based on the following paper. - - Large-Scale Image Retrieval with Attentive Deep Local Features - https://arxiv.org/abs/1612.06321 - -Please refer to the README.md file for detailed explanations on using the DELF -model. -""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import tensorflow as tf -from tf_slim import layers -from tf_slim.nets import resnet_v1 -from tf_slim.ops.arg_scope import arg_scope - -_SUPPORTED_TARGET_LAYER = ['resnet_v1_50/block3', 'resnet_v1_50/block4'] - -# The variable scope for the attention portion of the model. -_ATTENTION_VARIABLE_SCOPE = 'attention_block' - -# The attention_type determines whether the attention based feature aggregation -# is performed on the L2-normalized feature map or on the default feature map -# where L2-normalization is not applied. Note that in both cases, attention -# functions are built on the un-normalized feature map. This is only relevant -# for the training stage. -# Currently supported options are as follows: -# * use_l2_normalized_feature: -# The option use_l2_normalized_feature first applies L2-normalization on the -# feature map and then applies attention based feature aggregation. This -# option is used for the DELF+FT+Att model in the paper. -# * use_default_input_feature: -# The option use_default_input_feature aggregates unnormalized feature map -# directly. -_SUPPORTED_ATTENTION_TYPES = [ - 'use_l2_normalized_feature', 'use_default_input_feature' -] - -# Supported types of non-lineary for the attention score function. -_SUPPORTED_ATTENTION_NONLINEARITY = ['softplus'] - - -class DelfV1(object): - """Creates a DELF model. - - Args: - target_layer_type: The name of target CNN architecture and its layer. - - Raises: - ValueError: If an unknown target_layer_type is provided. - """ - - def __init__(self, target_layer_type=_SUPPORTED_TARGET_LAYER[0]): - print('Creating model %s ' % target_layer_type) - - self._target_layer_type = target_layer_type - if self._target_layer_type not in _SUPPORTED_TARGET_LAYER: - raise ValueError('Unknown model type.') - - @property - def target_layer_type(self): - return self._target_layer_type - - def _PerformAttention(self, - attention_feature_map, - feature_map, - attention_nonlinear, - kernel=1): - """Helper function to construct the attention part of the model. 
- - Computes attention score map and aggregates the input feature map based on - the attention score map. - - Args: - attention_feature_map: Potentially normalized feature map that will be - aggregated with attention score map. - feature_map: Unnormalized feature map that will be used to compute - attention score map. - attention_nonlinear: Type of non-linearity that will be applied to - attention value. - kernel: Convolutional kernel to use in attention layers (eg: 1, [3, 3]). - - Returns: - attention_feat: Aggregated feature vector. - attention_prob: Attention score map after the non-linearity. - attention_score: Attention score map before the non-linearity. - - Raises: - ValueError: If unknown attention non-linearity type is provided. - """ - with tf.compat.v1.variable_scope( - 'attention', values=[attention_feature_map, feature_map]): - with tf.compat.v1.variable_scope('compute', values=[feature_map]): - activation_fn_conv1 = tf.nn.relu - feature_map_conv1 = layers.conv2d( - feature_map, - 512, - kernel, - rate=1, - activation_fn=activation_fn_conv1, - scope='conv1') - - attention_score = layers.conv2d( - feature_map_conv1, - 1, - kernel, - rate=1, - activation_fn=None, - normalizer_fn=None, - scope='conv2') - - # Set activation of conv2 layer of attention model. - with tf.compat.v1.variable_scope( - 'merge', values=[attention_feature_map, attention_score]): - if attention_nonlinear not in _SUPPORTED_ATTENTION_NONLINEARITY: - raise ValueError('Unknown attention non-linearity.') - if attention_nonlinear == 'softplus': - with tf.compat.v1.variable_scope( - 'softplus_attention', - values=[attention_feature_map, attention_score]): - attention_prob = tf.nn.softplus(attention_score) - attention_feat = tf.reduce_mean( - tf.multiply(attention_feature_map, attention_prob), [1, 2]) - attention_feat = tf.expand_dims(tf.expand_dims(attention_feat, 1), 2) - return attention_feat, attention_prob, attention_score - - def _GetAttentionSubnetwork( - self, - feature_map, - end_points, - attention_nonlinear=_SUPPORTED_ATTENTION_NONLINEARITY[0], - attention_type=_SUPPORTED_ATTENTION_TYPES[0], - kernel=1, - reuse=False): - """Constructs the part of the model performing attention. - - Args: - feature_map: A tensor of size [batch, height, width, channels]. Usually it - corresponds to the output feature map of a fully-convolutional network. - end_points: Set of activations of the network constructed so far. - attention_nonlinear: Type of non-linearity on top of the attention - function. - attention_type: Type of the attention structure. - kernel: Convolutional kernel to use in attention layers (eg, [3, 3]). - reuse: Whether or not the layer and its variables should be reused. - - Returns: - prelogits: A tensor of size [batch, 1, 1, channels]. - attention_prob: Attention score after the non-linearity. - attention_score: Attention score before the non-linearity. - end_points: Updated set of activations, for external use. - Raises: - ValueError: If unknown attention_type is provided. 
- """ - with tf.compat.v1.variable_scope( - _ATTENTION_VARIABLE_SCOPE, - values=[feature_map, end_points], - reuse=reuse): - if attention_type not in _SUPPORTED_ATTENTION_TYPES: - raise ValueError('Unknown attention_type.') - if attention_type == 'use_l2_normalized_feature': - attention_feature_map = tf.nn.l2_normalize( - feature_map, 3, name='l2_normalize') - elif attention_type == 'use_default_input_feature': - attention_feature_map = feature_map - end_points['attention_feature_map'] = attention_feature_map - - attention_outputs = self._PerformAttention(attention_feature_map, - feature_map, - attention_nonlinear, kernel) - prelogits, attention_prob, attention_score = attention_outputs - end_points['prelogits'] = prelogits - end_points['attention_prob'] = attention_prob - end_points['attention_score'] = attention_score - return prelogits, attention_prob, attention_score, end_points - - def GetResnet50Subnetwork(self, - images, - is_training=False, - global_pool=False, - reuse=None): - """Constructs resnet_v1_50 part of the DELF model. - - Args: - images: A tensor of size [batch, height, width, channels]. - is_training: Whether or not the model is in training mode. - global_pool: If True, perform global average pooling after feature - extraction. This may be useful for DELF's descriptor fine-tuning stage. - reuse: Whether or not the layer and its variables should be reused. - - Returns: - net: A rank-4 tensor of size [batch, height_out, width_out, channels_out]. - If global_pool is True, height_out = width_out = 1. - end_points: A set of activations for external use. - """ - block = resnet_v1.resnet_v1_block - blocks = [ - block('block1', base_depth=64, num_units=3, stride=2), - block('block2', base_depth=128, num_units=4, stride=2), - block('block3', base_depth=256, num_units=6, stride=2), - ] - if self._target_layer_type == 'resnet_v1_50/block4': - blocks.append(block('block4', base_depth=512, num_units=3, stride=1)) - net, end_points = resnet_v1.resnet_v1( - images, - blocks, - is_training=is_training, - global_pool=global_pool, - reuse=reuse, - scope='resnet_v1_50') - return net, end_points - - def GetAttentionPrelogit( - self, - images, - weight_decay=0.0001, - attention_nonlinear=_SUPPORTED_ATTENTION_NONLINEARITY[0], - attention_type=_SUPPORTED_ATTENTION_TYPES[0], - kernel=1, - training_resnet=False, - training_attention=False, - reuse=False, - use_batch_norm=True): - """Constructs attention model on resnet_v1_50. - - Args: - images: A tensor of size [batch, height, width, channels]. - weight_decay: The parameters for weight_decay regularizer. - attention_nonlinear: Type of non-linearity on top of the attention - function. - attention_type: Type of the attention structure. - kernel: Convolutional kernel to use in attention layers (eg, [3, 3]). - training_resnet: Whether or not the Resnet blocks from the model are in - training mode. - training_attention: Whether or not the attention part of the model is in - training mode. - reuse: Whether or not the layer and its variables should be reused. - use_batch_norm: Whether or not to use batch normalization. - - Returns: - prelogits: A tensor of size [batch, 1, 1, channels]. - attention_prob: Attention score after the non-linearity. - attention_score: Attention score before the non-linearity. - feature_map: Features extracted from the model, which are not - l2-normalized. - end_points: Set of activations for external use. - """ - # Construct Resnet50 features. 
- with arg_scope(resnet_v1.resnet_arg_scope(use_batch_norm=use_batch_norm)): - _, end_points = self.GetResnet50Subnetwork( - images, is_training=training_resnet, reuse=reuse) - - feature_map = end_points[self._target_layer_type] - - # Construct attention subnetwork on top of features. - with arg_scope( - resnet_v1.resnet_arg_scope( - weight_decay=weight_decay, use_batch_norm=use_batch_norm)): - with arg_scope([layers.batch_norm], is_training=training_attention): - (prelogits, attention_prob, attention_score, - end_points) = self._GetAttentionSubnetwork( - feature_map, - end_points, - attention_nonlinear=attention_nonlinear, - attention_type=attention_type, - kernel=kernel, - reuse=reuse) - - return prelogits, attention_prob, attention_score, feature_map, end_points - - def _GetAttentionModel( - self, - images, - num_classes, - weight_decay=0.0001, - attention_nonlinear=_SUPPORTED_ATTENTION_NONLINEARITY[0], - attention_type=_SUPPORTED_ATTENTION_TYPES[0], - kernel=1, - training_resnet=False, - training_attention=False, - reuse=False): - """Constructs attention model on resnet_v1_50. - - Args: - images: A tensor of size [batch, height, width, channels] - num_classes: The number of output classes. - weight_decay: The parameters for weight_decay regularizer. - attention_nonlinear: Type of non-linearity on top of the attention - function. - attention_type: Type of the attention structure. - kernel: Convolutional kernel to use in attention layers (eg, [3, 3]). - training_resnet: Whether or not the Resnet blocks from the model are in - training mode. - training_attention: Whether or not the attention part of the model is in - training mode. - reuse: Whether or not the layer and its variables should be reused. - - Returns: - logits: A tensor of size [batch, num_classes]. - attention_prob: Attention score after the non-linearity. - attention_score: Attention score before the non-linearity. - feature_map: Features extracted from the model, which are not - l2-normalized. - """ - - attention_feat, attention_prob, attention_score, feature_map, _ = ( - self.GetAttentionPrelogit( - images, - weight_decay, - attention_nonlinear=attention_nonlinear, - attention_type=attention_type, - kernel=kernel, - training_resnet=training_resnet, - training_attention=training_attention, - reuse=reuse)) - with arg_scope( - resnet_v1.resnet_arg_scope( - weight_decay=weight_decay, batch_norm_scale=True)): - with arg_scope([layers.batch_norm], is_training=training_attention): - with tf.compat.v1.variable_scope( - _ATTENTION_VARIABLE_SCOPE, values=[attention_feat], reuse=reuse): - logits = layers.conv2d( - attention_feat, - num_classes, [1, 1], - activation_fn=None, - normalizer_fn=None, - scope='logits') - logits = tf.squeeze(logits, [1, 2], name='spatial_squeeze') - return logits, attention_prob, attention_score, feature_map - - def AttentionModel(self, - images, - num_classes, - weight_decay=0.0001, - attention_nonlinear=_SUPPORTED_ATTENTION_NONLINEARITY[0], - attention_type=_SUPPORTED_ATTENTION_TYPES[0], - kernel=1, - training_resnet=False, - training_attention=False, - reuse=False): - """Constructs attention based classification model for training. - - Args: - images: A tensor of size [batch, height, width, channels] - num_classes: The number of output classes. - weight_decay: The parameters for weight_decay regularizer. - attention_nonlinear: Type of non-linearity on top of the attention - function. - attention_type: Type of the attention structure. - kernel: Convolutional kernel to use in attention layers (eg, [3, 3]). 
- training_resnet: Whether or not the Resnet blocks from the model are in - training mode. - training_attention: Whether or not the model is in training mode. Note - that this function only supports training the attention part of the - model, ie, the feature extraction layers are not trained. - reuse: Whether or not the layer and its variables should be reused. - - Returns: - logit: A tensor of size [batch, num_classes] - attention: Attention score after the non-linearity. - feature_map: Features extracted from the model, which are not - l2-normalized. - - Raises: - ValueError: If unknown target_layer_type is provided. - """ - if 'resnet_v1_50' in self._target_layer_type: - net_outputs = self._GetAttentionModel( - images, - num_classes, - weight_decay, - attention_nonlinear=attention_nonlinear, - attention_type=attention_type, - kernel=kernel, - training_resnet=training_resnet, - training_attention=training_attention, - reuse=reuse) - logits, attention, _, feature_map = net_outputs - else: - raise ValueError('Unknown target_layer_type.') - return logits, attention, feature_map diff --git a/research/delf/delf/python/detect_to_retrieve/aggregation_extraction.py b/research/delf/delf/python/detect_to_retrieve/aggregation_extraction.py index af85e27dc..4ddab944b 100644 --- a/research/delf/delf/python/detect_to_retrieve/aggregation_extraction.py +++ b/research/delf/delf/python/detect_to_retrieve/aggregation_extraction.py @@ -124,71 +124,70 @@ def ExtractAggregatedRepresentationsToFiles(image_names, features_dir, if not tf.io.gfile.exists(output_aggregation_dir): tf.io.gfile.makedirs(output_aggregation_dir) - with tf.compat.v1.Session() as sess: - extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( - sess, config) - - start = time.clock() - for i in range(num_images): - if i == 0: - print('Starting to extract aggregation from images...') - elif i % _STATUS_CHECK_ITERATIONS == 0: - elapsed = (time.clock() - start) - print('Processing image %d out of %d, last %d ' - 'images took %f seconds' % - (i, num_images, _STATUS_CHECK_ITERATIONS, elapsed)) - start = time.clock() - - image_name = image_names[i] - - # Compose output file name, skip extraction for this image if it already - # exists. - output_aggregation_filename = os.path.join(output_aggregation_dir, - image_name + output_extension) - if tf.io.gfile.exists(output_aggregation_filename): - print('Skipping %s' % image_name) - continue - - # Load DELF features. - if config.use_regional_aggregation: - if not mapping_path: - raise ValueError( - 'Requested regional aggregation, but mapping_path was not ' - 'provided') - descriptors_list = [] - num_features_per_box = [] - for box_feature_file in images_to_box_feature_files[image_name]: - delf_filename = os.path.join(features_dir, - box_feature_file + _DELF_EXTENSION) - _, _, box_descriptors, _, _ = feature_io.ReadFromFile(delf_filename) - # If `box_descriptors` is empty, reshape it such that it can be - # concatenated with other descriptors. - if not box_descriptors.shape[0]: - box_descriptors = np.reshape(box_descriptors, - [0, config.feature_dimensionality]) - descriptors_list.append(box_descriptors) - num_features_per_box.append(box_descriptors.shape[0]) - - descriptors = np.concatenate(descriptors_list) - else: - input_delf_filename = os.path.join(features_dir, - image_name + _DELF_EXTENSION) - _, _, descriptors, _, _ = feature_io.ReadFromFile(input_delf_filename) - # If `descriptors` is empty, reshape it to avoid extraction failure. 
- if not descriptors.shape[0]: - descriptors = np.reshape(descriptors, - [0, config.feature_dimensionality]) - num_features_per_box = None - - # Extract and save aggregation. If using VLAD, only - # `aggregated_descriptors` needs to be saved. - (aggregated_descriptors, - feature_visual_words) = extractor.Extract(descriptors, - num_features_per_box) - if config.aggregation_type == _VLAD: - datum_io.WriteToFile(aggregated_descriptors, - output_aggregation_filename) - else: - datum_io.WritePairToFile(aggregated_descriptors, - feature_visual_words.astype('uint32'), - output_aggregation_filename) + extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( + config) + + start = time.time() + for i in range(num_images): + if i == 0: + print('Starting to extract aggregation from images...') + elif i % _STATUS_CHECK_ITERATIONS == 0: + elapsed = (time.time() - start) + print('Processing image %d out of %d, last %d ' + 'images took %f seconds' % + (i, num_images, _STATUS_CHECK_ITERATIONS, elapsed)) + start = time.time() + + image_name = image_names[i] + + # Compose output file name, skip extraction for this image if it already + # exists. + output_aggregation_filename = os.path.join(output_aggregation_dir, + image_name + output_extension) + if tf.io.gfile.exists(output_aggregation_filename): + print('Skipping %s' % image_name) + continue + + # Load DELF features. + if config.use_regional_aggregation: + if not mapping_path: + raise ValueError( + 'Requested regional aggregation, but mapping_path was not ' + 'provided') + descriptors_list = [] + num_features_per_box = [] + for box_feature_file in images_to_box_feature_files[image_name]: + delf_filename = os.path.join(features_dir, + box_feature_file + _DELF_EXTENSION) + _, _, box_descriptors, _, _ = feature_io.ReadFromFile(delf_filename) + # If `box_descriptors` is empty, reshape it such that it can be + # concatenated with other descriptors. + if not box_descriptors.shape[0]: + box_descriptors = np.reshape(box_descriptors, + [0, config.feature_dimensionality]) + descriptors_list.append(box_descriptors) + num_features_per_box.append(box_descriptors.shape[0]) + + descriptors = np.concatenate(descriptors_list) + else: + input_delf_filename = os.path.join(features_dir, + image_name + _DELF_EXTENSION) + _, _, descriptors, _, _ = feature_io.ReadFromFile(input_delf_filename) + # If `descriptors` is empty, reshape it to avoid extraction failure. + if not descriptors.shape[0]: + descriptors = np.reshape(descriptors, + [0, config.feature_dimensionality]) + num_features_per_box = None + + # Extract and save aggregation. If using VLAD, only + # `aggregated_descriptors` needs to be saved. + (aggregated_descriptors, + feature_visual_words) = extractor.Extract(descriptors, + num_features_per_box) + if config.aggregation_type == _VLAD: + datum_io.WriteToFile(aggregated_descriptors, + output_aggregation_filename) + else: + datum_io.WritePairToFile(aggregated_descriptors, + feature_visual_words.astype('uint32'), + output_aggregation_filename) diff --git a/research/delf/delf/python/feature_aggregation_extractor.py b/research/delf/delf/python/feature_aggregation_extractor.py index 36794f646..f230642ea 100644 --- a/research/delf/delf/python/feature_aggregation_extractor.py +++ b/research/delf/delf/python/feature_aggregation_extractor.py @@ -40,7 +40,6 @@ class ExtractAggregatedRepresentation(object): """Class for extraction of aggregated local feature representation. Args: - sess: TensorFlow session to use. 
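The hunk below replaces the TF1 pattern of `tf.compat.v1.get_variable` plus `tf.compat.v1.train.init_from_checkpoint` inside a Session with an object-based `tf.train.Checkpoint` restore. A standalone round trip of that save/restore pattern (paths and values invented for the example):

```
import tensorflow as tf

# Write a checkpoint holding a single tracked variable under the key 'codebook'.
codebook = tf.Variable([[0.5, 0.5], [0.0, 1.0]], dtype=tf.float32)
tf.train.Checkpoint(codebook=codebook).write('/tmp/example_codebook')

# Restore into a freshly created variable of the same shape; no Session or
# global variable initializer is involved.
restored = tf.Variable(tf.zeros([2, 2], dtype=tf.float32))
tf.train.Checkpoint(codebook=restored).restore('/tmp/example_codebook')
print(restored.numpy())  # round-trips the values [[0.5, 0.5], [0.0, 1.0]]
```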
aggregation_config: AggregationConfig object defining type of aggregation to use. @@ -48,65 +47,28 @@ class ExtractAggregatedRepresentation(object): ValueError: If aggregation type is invalid. """ - def __init__(self, sess, aggregation_config): - self._sess = sess + def __init__(self, aggregation_config): self._codebook_size = aggregation_config.codebook_size self._feature_dimensionality = aggregation_config.feature_dimensionality self._aggregation_type = aggregation_config.aggregation_type self._feature_batch_size = aggregation_config.feature_batch_size + self._codebook_path = aggregation_config.codebook_path + self._use_regional_aggregation = aggregation_config.use_regional_aggregation + self._use_l2_normalization = aggregation_config.use_l2_normalization + self._num_assignments = aggregation_config.num_assignments - # Inputs to extraction function. - self._features = tf.compat.v1.placeholder(tf.float32, [None, None]) - self._num_features_per_region = tf.compat.v1.placeholder(tf.int32, [None]) - - # Load codebook into graph. - codebook = tf.compat.v1.get_variable( - "codebook", - shape=[ - aggregation_config.codebook_size, - aggregation_config.feature_dimensionality - ]) - tf.compat.v1.train.init_from_checkpoint( - aggregation_config.codebook_path, {_CLUSTER_CENTERS_VAR_NAME: codebook}) - - # Construct extraction graph based on desired options. - if self._aggregation_type == _VLAD: - # Feature visual words are unused in the case of VLAD, so just return - # dummy constant. - self._feature_visual_words = tf.constant(-1, dtype=tf.int32) - if aggregation_config.use_regional_aggregation: - self._aggregated_descriptors = self._ComputeRvlad( - self._features, - self._num_features_per_region, - codebook, - use_l2_normalization=aggregation_config.use_l2_normalization, - num_assignments=aggregation_config.num_assignments) - else: - self._aggregated_descriptors = self._ComputeVlad( - self._features, - codebook, - use_l2_normalization=aggregation_config.use_l2_normalization, - num_assignments=aggregation_config.num_assignments) - elif (self._aggregation_type == _ASMK or - self._aggregation_type == _ASMK_STAR): - if aggregation_config.use_regional_aggregation: - (self._aggregated_descriptors, - self._feature_visual_words) = self._ComputeRasmk( - self._features, - self._num_features_per_region, - codebook, - num_assignments=aggregation_config.num_assignments) - else: - (self._aggregated_descriptors, - self._feature_visual_words) = self._ComputeAsmk( - self._features, - codebook, - num_assignments=aggregation_config.num_assignments) - else: + if self._aggregation_type not in [_VLAD, _ASMK, _ASMK_STAR]: raise ValueError("Invalid aggregation type: %d" % self._aggregation_type) - # Initialize variables in the TF graph. - sess.run(tf.compat.v1.global_variables_initializer()) + # Load codebook + codebook = tf.Variable( + tf.zeros([self._codebook_size, self._feature_dimensionality], + dtype=tf.float32), + name=_CLUSTER_CENTERS_VAR_NAME) + ckpt = tf.train.Checkpoint(codebook=codebook) + ckpt.restore(self._codebook_path) + + self._codebook = codebook def Extract(self, features, num_features_per_region=None): """Extracts aggregated representation. @@ -127,10 +89,13 @@ class ExtractAggregatedRepresentation(object): Raises: ValueError: If inputs are misconfigured. """ + features = tf.cast(features, dtype=tf.float32) + if num_features_per_region is None: # Use dummy value since it is unused. 
num_features_per_region = [] else: + num_features_per_region = tf.cast(num_features_per_region, dtype=tf.int32) if len(num_features_per_region ) and sum(num_features_per_region) != features.shape[0]: raise ValueError( @@ -138,12 +103,41 @@ class ExtractAggregatedRepresentation(object): "features.shape[0] are different: %d vs %d" % (sum(num_features_per_region), features.shape[0])) - aggregated_descriptors, feature_visual_words = self._sess.run( - [self._aggregated_descriptors, self._feature_visual_words], - feed_dict={ - self._features: features, - self._num_features_per_region: num_features_per_region - }) + # Extract features based on desired options. + if self._aggregation_type == _VLAD: + # Feature visual words are unused in the case of VLAD, so just return + # dummy constant. + feature_visual_words = tf.constant(-1, dtype=tf.int32) + if self._use_regional_aggregation: + aggregated_descriptors = self._ComputeRvlad( + features, + num_features_per_region, + self._codebook, + use_l2_normalization=self._use_l2_normalization, + num_assignments=self._num_assignments) + else: + aggregated_descriptors = self._ComputeVlad( + features, + self._codebook, + use_l2_normalization=self._use_l2_normalization, + num_assignments=self._num_assignments) + elif (self._aggregation_type == _ASMK or + self._aggregation_type == _ASMK_STAR): + if self._use_regional_aggregation: + (aggregated_descriptors, + feature_visual_words) = self._ComputeRasmk( + features, + num_features_per_region, + self._codebook, + num_assignments=self._num_assignments) + else: + (aggregated_descriptors, + feature_visual_words) = self._ComputeAsmk( + features, + self._codebook, + num_assignments=self._num_assignments) + + feature_visual_words_output = feature_visual_words.numpy() # If using ASMK*/RASMK*, binarize the aggregated descriptors. if self._aggregation_type == _ASMK_STAR: @@ -151,9 +145,11 @@ class ExtractAggregatedRepresentation(object): aggregated_descriptors, [-1, self._feature_dimensionality]) packed_descriptors = np.packbits( reshaped_aggregated_descriptors > 0, axis=1) - aggregated_descriptors = np.reshape(packed_descriptors, [-1]) + aggregated_descriptors_output = np.reshape(packed_descriptors, [-1]) + else: + aggregated_descriptors_output = aggregated_descriptors.numpy() - return aggregated_descriptors, feature_visual_words + return aggregated_descriptors_output, feature_visual_words_output def _ComputeVlad(self, features, @@ -268,11 +264,13 @@ class ExtractAggregatedRepresentation(object): output_vlad: VLAD descriptor updated to take into account contribution from ind-th feature. 
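The loop body in this hunk accumulates each feature's residuals into the VLAD tensor with `tf.tensor_scatter_nd_add`, which adds `updates` rows into `tensor` at the given `indices`, with duplicate indices accumulating. A small illustration with made-up numbers:

```
import tensorflow as tf

vlad = tf.zeros([5, 2], dtype=tf.float32)     # one residual slot per visual word
visual_words = tf.constant([[2], [2], [4]])   # word assignments for three residuals
residuals = tf.constant([[1.0, 0.0], [0.5, 0.5], [0.0, 1.0]])

updated = tf.tensor_scatter_nd_add(vlad, visual_words, residuals)
# Row 2 accumulates [1.5, 0.5], row 4 gets [0.0, 1.0], other rows stay zero.
```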
""" + diff = tf.tile( + tf.expand_dims(features[ind], + axis=0), [num_assignments, 1]) - tf.gather( + codebook, selected_visual_words[ind]) return ind + 1, tf.tensor_scatter_nd_add( vlad, tf.expand_dims(selected_visual_words[ind], axis=1), - tf.tile( - tf.expand_dims(features[ind], axis=0), [num_assignments, 1]) - - tf.gather(codebook, selected_visual_words[ind])) + tf.cast(diff, dtype=tf.float32)) ind_vlad = tf.constant(0, dtype=tf.int32) keep_going = lambda j, vlad: tf.less(j, num_features) @@ -398,7 +396,9 @@ class ExtractAggregatedRepresentation(object): visual_words = tf.reshape( tf.where( - tf.greater(per_centroid_norms, tf.sqrt(_NORM_SQUARED_TOLERANCE))), + tf.greater( + per_centroid_norms, + tf.cast(tf.sqrt(_NORM_SQUARED_TOLERANCE), dtype=tf.float32))), [-1]) per_centroid_normalized_vector = tf.math.l2_normalize( diff --git a/research/delf/delf/python/feature_aggregation_extractor_test.py b/research/delf/delf/python/feature_aggregation_extractor_test.py index b23649550..dfba92a2b 100644 --- a/research/delf/delf/python/feature_aggregation_extractor_test.py +++ b/research/delf/delf/python/feature_aggregation_extractor_test.py @@ -20,12 +20,15 @@ from __future__ import print_function import os +from absl import flags import numpy as np import tensorflow as tf from delf import aggregation_config_pb2 from delf import feature_aggregation_extractor +FLAGS = flags.FLAGS + class FeatureAggregationTest(tf.test.TestCase): @@ -35,17 +38,15 @@ class FeatureAggregationTest(tf.test.TestCase): Args: checkpoint_path: Directory where codebook is saved to. """ - with tf.Graph().as_default() as g, self.session(graph=g) as sess: - codebook = tf.Variable( - [[0.5, 0.5], [0.0, 0.0], [1.0, 0.0], [-0.5, -0.5], [0.0, 1.0]], - name='clusters') - saver = tf.compat.v1.train.Saver([codebook]) - sess.run(tf.compat.v1.global_variables_initializer()) - saver.save(sess, checkpoint_path) + codebook = tf.Variable( + [[0.5, 0.5], [0.0, 0.0], [1.0, 0.0], [-0.5, -0.5], [0.0, 1.0]], + name='clusters', + dtype=tf.float32) + ckpt = tf.train.Checkpoint(codebook=codebook) + ckpt.write(checkpoint_path) def setUp(self): - self._codebook_path = os.path.join(tf.compat.v1.test.get_temp_dir(), - 'test_codebook') + self._codebook_path = os.path.join(FLAGS.test_tmpdir, 'test_codebook') self._CreateCodebook(self._codebook_path) def testComputeNormalizedVladWorks(self): @@ -61,10 +62,9 @@ class FeatureAggregationTest(tf.test.TestCase): config.num_assignments = 1 # Run tested function. - with tf.Graph().as_default() as g, self.session(graph=g) as sess: - extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( - sess, config) - vlad, extra_output = extractor.Extract(features) + extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( + config) + vlad, extra_output = extractor.Extract(features) # Define expected results. exp_vlad = [ @@ -90,10 +90,9 @@ class FeatureAggregationTest(tf.test.TestCase): config.feature_batch_size = 2 # Run tested function. - with tf.Graph().as_default() as g, self.session(graph=g) as sess: - extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( - sess, config) - vlad, extra_output = extractor.Extract(features) + extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( + config) + vlad, extra_output = extractor.Extract(features) # Define expected results. exp_vlad = [ @@ -118,10 +117,9 @@ class FeatureAggregationTest(tf.test.TestCase): config.num_assignments = 1 # Run tested function. 
- with tf.Graph().as_default() as g, self.session(graph=g) as sess: - extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( - sess, config) - vlad, extra_output = extractor.Extract(features) + extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( + config) + vlad, extra_output = extractor.Extract(features) # Define expected results. exp_vlad = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 0.5, 1.0, 1.0] @@ -144,10 +142,9 @@ class FeatureAggregationTest(tf.test.TestCase): config.num_assignments = 3 # Run tested function. - with tf.Graph().as_default() as g, self.session(graph=g) as sess: - extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( - sess, config) - vlad, extra_output = extractor.Extract(features) + extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( + config) + vlad, extra_output = extractor.Extract(features) # Define expected results. exp_vlad = [1.0, 1.0, 0.0, 0.0, 0.0, 2.0, -0.5, 0.5, 0.0, 0.0] @@ -168,10 +165,9 @@ class FeatureAggregationTest(tf.test.TestCase): config.codebook_path = self._codebook_path # Run tested function. - with tf.Graph().as_default() as g, self.session(graph=g) as sess: - extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( - sess, config) - vlad, extra_output = extractor.Extract(features) + extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( + config) + vlad, extra_output = extractor.Extract(features) # Define expected results. exp_vlad = np.zeros([10], dtype=float) @@ -197,10 +193,9 @@ class FeatureAggregationTest(tf.test.TestCase): config.use_regional_aggregation = True # Run tested function. - with tf.Graph().as_default() as g, self.session(graph=g) as sess: - extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( - sess, config) - rvlad, extra_output = extractor.Extract(features, num_features_per_region) + extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( + config) + rvlad, extra_output = extractor.Extract(features, num_features_per_region) # Define expected results. exp_rvlad = [ @@ -228,10 +223,9 @@ class FeatureAggregationTest(tf.test.TestCase): config.use_regional_aggregation = True # Run tested function. - with tf.Graph().as_default() as g, self.session(graph=g) as sess: - extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( - sess, config) - rvlad, extra_output = extractor.Extract(features, num_features_per_region) + extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( + config) + rvlad, extra_output = extractor.Extract(features, num_features_per_region) # Define expected results. exp_rvlad = [ @@ -256,10 +250,9 @@ class FeatureAggregationTest(tf.test.TestCase): config.use_regional_aggregation = True # Run tested function. - with tf.Graph().as_default() as g, self.session(graph=g) as sess: - extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( - sess, config) - rvlad, extra_output = extractor.Extract(features, num_features_per_region) + extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( + config) + rvlad, extra_output = extractor.Extract(features, num_features_per_region) # Define expected results. exp_rvlad = np.zeros([10], dtype=float) @@ -286,10 +279,9 @@ class FeatureAggregationTest(tf.test.TestCase): config.use_regional_aggregation = True # Run tested function. 
- with tf.Graph().as_default() as g, self.session(graph=g) as sess: - extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( - sess, config) - rvlad, extra_output = extractor.Extract(features, num_features_per_region) + extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( + config) + rvlad, extra_output = extractor.Extract(features, num_features_per_region) # Define expected results. exp_rvlad = [ @@ -318,10 +310,9 @@ class FeatureAggregationTest(tf.test.TestCase): config.use_regional_aggregation = True # Run tested function. - with tf.Graph().as_default() as g, self.session(graph=g) as sess: - extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( - sess, config) - rvlad, extra_output = extractor.Extract(features, num_features_per_region) + extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( + config) + rvlad, extra_output = extractor.Extract(features, num_features_per_region) # Define expected results. exp_rvlad = [ @@ -349,14 +340,13 @@ class FeatureAggregationTest(tf.test.TestCase): config.use_regional_aggregation = True # Run tested function. - with tf.Graph().as_default() as g, self.session(graph=g) as sess: - extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( - sess, config) - with self.assertRaisesRegex( - ValueError, - r'Incorrect arguments: sum\(num_features_per_region\) and ' - r'features.shape\[0\] are different'): - extractor.Extract(features, num_features_per_region) + extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( + config) + with self.assertRaisesRegex( + ValueError, + r'Incorrect arguments: sum\(num_features_per_region\) and ' + r'features.shape\[0\] are different'): + extractor.Extract(features, num_features_per_region) def testComputeAsmkWorks(self): # Construct inputs. @@ -370,10 +360,9 @@ class FeatureAggregationTest(tf.test.TestCase): config.num_assignments = 1 # Run tested function. - with tf.Graph().as_default() as g, self.session(graph=g) as sess: - extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( - sess, config) - asmk, visual_words = extractor.Extract(features) + extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( + config) + asmk, visual_words = extractor.Extract(features) # Define expected results. exp_asmk = [-0.707107, 0.707107, 0.707107, 0.707107] @@ -395,10 +384,9 @@ class FeatureAggregationTest(tf.test.TestCase): config.num_assignments = 1 # Run tested function. - with tf.Graph().as_default() as g, self.session(graph=g) as sess: - extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( - sess, config) - asmk_star, visual_words = extractor.Extract(features) + extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( + config) + asmk_star, visual_words = extractor.Extract(features) # Define expected results. exp_asmk_star = [64, 192] @@ -420,10 +408,9 @@ class FeatureAggregationTest(tf.test.TestCase): config.num_assignments = 3 # Run tested function. - with tf.Graph().as_default() as g, self.session(graph=g) as sess: - extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( - sess, config) - asmk, visual_words = extractor.Extract(features) + extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( + config) + asmk, visual_words = extractor.Extract(features) # Define expected results. 
exp_asmk = [0.707107, 0.707107, 0.0, 1.0, -0.707107, 0.707107] @@ -448,10 +435,9 @@ class FeatureAggregationTest(tf.test.TestCase): config.use_regional_aggregation = True # Run tested function. - with tf.Graph().as_default() as g, self.session(graph=g) as sess: - extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( - sess, config) - rasmk, visual_words = extractor.Extract(features, num_features_per_region) + extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( + config) + rasmk, visual_words = extractor.Extract(features, num_features_per_region) # Define expected results. exp_rasmk = [-0.707107, 0.707107, 0.361261, 0.932465] @@ -476,11 +462,10 @@ class FeatureAggregationTest(tf.test.TestCase): config.use_regional_aggregation = True # Run tested function. - with tf.Graph().as_default() as g, self.session(graph=g) as sess: - extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( - sess, config) - rasmk_star, visual_words = extractor.Extract(features, - num_features_per_region) + extractor = feature_aggregation_extractor.ExtractAggregatedRepresentation( + config) + rasmk_star, visual_words = extractor.Extract(features, + num_features_per_region) # Define expected results. exp_rasmk_star = [64, 192] @@ -500,10 +485,9 @@ class FeatureAggregationTest(tf.test.TestCase): config.use_regional_aggregation = True # Run tested function. - with tf.Graph().as_default() as g, self.session(graph=g) as sess: - with self.assertRaisesRegex(ValueError, 'Invalid aggregation type'): - feature_aggregation_extractor.ExtractAggregatedRepresentation( - sess, config) + with self.assertRaisesRegex(ValueError, 'Invalid aggregation type'): + feature_aggregation_extractor.ExtractAggregatedRepresentation( + config) if __name__ == '__main__': diff --git a/research/delf/delf/python/feature_extractor.py b/research/delf/delf/python/feature_extractor.py index 3d574f076..9545337f1 100644 --- a/research/delf/delf/python/feature_extractor.py +++ b/research/delf/delf/python/feature_extractor.py @@ -19,10 +19,6 @@ from __future__ import print_function import tensorflow as tf -from delf import delf_v1 -from object_detection.core import box_list -from object_detection.core import box_list_ops - def NormalizePixelValues(image, pixel_value_offset=128.0, @@ -81,219 +77,6 @@ def CalculateKeypointCenters(boxes): 2.0) -def ExtractKeypointDescriptor(image, layer_name, image_scales, iou, - max_feature_num, abs_thres, model_fn): - """Extract keypoint descriptor for input image. - - Args: - image: A image tensor with shape [h, w, channels]. - layer_name: The endpoint of feature extraction layer. - image_scales: A 1D float tensor which contains the scales. - iou: A float scalar denoting the IOU threshold for NMS. - max_feature_num: An int tensor denoting the maximum selected feature points. - abs_thres: A float tensor denoting the score threshold for feature - selection. - model_fn: Model function. Follows the signature: - * Args: - * `images`: Image tensor which is re-scaled. - * `normalized_image`: Whether or not the images are normalized. - * `reuse`: Whether or not the layer and its variables should be reused. - * Returns: - * `attention`: Attention score after the non-linearity. - * `feature_map`: Feature map obtained from the ResNet model. - - Returns: - boxes: [N, 4] float tensor which denotes the selected receptive box. N is - the number of final feature points which pass through keypoint selection - and NMS steps. - feature_scales: [N] float tensor. 
It is the inverse of the input image - scales such that larger image scales correspond to larger image regions, - which is compatible with scale-space keypoint detection convention. - features: [N, depth] float tensor with feature descriptors. - scores: [N, 1] float tensor denoting the attention score. - - Raises: - ValueError: If the layer_name is unsupported. - """ - original_image_shape_float = tf.gather( - tf.cast(tf.shape(image), dtype=tf.float32), [0, 1]) - image_tensor = NormalizePixelValues(image) - image_tensor = tf.expand_dims(image_tensor, 0, name='image/expand_dims') - - # Feature depth and receptive field parameters for each network version. - if layer_name == 'resnet_v1_50/block3': - feature_depth = 1024 - rf, stride, padding = [291.0, 32.0, 145.0] - elif layer_name == 'resnet_v1_50/block4': - feature_depth = 2048 - rf, stride, padding = [483.0, 32.0, 241.0] - else: - raise ValueError('Unsupported layer_name.') - - def _ProcessSingleScale(scale_index, - boxes, - features, - scales, - scores, - reuse=True): - """Resize the image and run feature extraction and keypoint selection. - - This function will be passed into tf.while_loop() and be called - repeatedly. The input boxes are collected from the previous iteration - [0: scale_index -1]. We get the current scale by - image_scales[scale_index], and run image resizing, feature extraction and - keypoint selection. Then we will get a new set of selected_boxes for - current scale. In the end, we concat the previous boxes with current - selected_boxes as the output. - - Args: - scale_index: A valid index in the image_scales. - boxes: Box tensor with the shape of [N, 4]. - features: Feature tensor with the shape of [N, depth]. - scales: Scale tensor with the shape of [N]. - scores: Attention score tensor with the shape of [N]. - reuse: Whether or not the layer and its variables should be reused. - - Returns: - scale_index: The next scale index for processing. - boxes: Concatenated box tensor with the shape of [K, 4]. K >= N. - features: Concatenated feature tensor with the shape of [K, depth]. - scales: Concatenated scale tensor with the shape of [K]. - scores: Concatenated attention score tensor with the shape of [K]. - """ - scale = tf.gather(image_scales, scale_index) - new_image_size = tf.cast( - tf.round(original_image_shape_float * scale), dtype=tf.int32) - resized_image = tf.compat.v1.image.resize_bilinear(image_tensor, - new_image_size) - - attention, feature_map = model_fn( - resized_image, normalized_image=True, reuse=reuse) - - rf_boxes = CalculateReceptiveBoxes( - tf.shape(feature_map)[1], - tf.shape(feature_map)[2], rf, stride, padding) - # Re-project back to the original image space. - rf_boxes = tf.divide(rf_boxes, scale) - attention = tf.reshape(attention, [-1]) - feature_map = tf.reshape(feature_map, [-1, feature_depth]) - - # Use attention score to select feature vectors. - indices = tf.reshape(tf.where(attention >= abs_thres), [-1]) - selected_boxes = tf.gather(rf_boxes, indices) - selected_features = tf.gather(feature_map, indices) - selected_scores = tf.gather(attention, indices) - selected_scales = tf.ones_like(selected_scores, tf.float32) / scale - - # Concat with the previous result from different scales. 
- boxes = tf.concat([boxes, selected_boxes], 0) - features = tf.concat([features, selected_features], 0) - scales = tf.concat([scales, selected_scales], 0) - scores = tf.concat([scores, selected_scores], 0) - - return scale_index + 1, boxes, features, scales, scores - - output_boxes = tf.zeros([0, 4], dtype=tf.float32) - output_features = tf.zeros([0, feature_depth], dtype=tf.float32) - output_scales = tf.zeros([0], dtype=tf.float32) - output_scores = tf.zeros([0], dtype=tf.float32) - - # Process the first scale separately, the following scales will reuse the - # graph variables. - (_, output_boxes, output_features, output_scales, - output_scores) = _ProcessSingleScale( - 0, - output_boxes, - output_features, - output_scales, - output_scores, - reuse=False) - i = tf.constant(1, dtype=tf.int32) - num_scales = tf.shape(image_scales)[0] - keep_going = lambda j, boxes, features, scales, scores: tf.less(j, num_scales) - - (_, output_boxes, output_features, output_scales, - output_scores) = tf.while_loop( - cond=keep_going, - body=_ProcessSingleScale, - loop_vars=[ - i, output_boxes, output_features, output_scales, output_scores - ], - shape_invariants=[ - i.get_shape(), - tf.TensorShape([None, 4]), - tf.TensorShape([None, feature_depth]), - tf.TensorShape([None]), - tf.TensorShape([None]) - ], - back_prop=False) - - feature_boxes = box_list.BoxList(output_boxes) - feature_boxes.add_field('features', output_features) - feature_boxes.add_field('scales', output_scales) - feature_boxes.add_field('scores', output_scores) - - nms_max_boxes = tf.minimum(max_feature_num, feature_boxes.num_boxes()) - final_boxes = box_list_ops.non_max_suppression(feature_boxes, iou, - nms_max_boxes) - - return (final_boxes.get(), final_boxes.get_field('scales'), - final_boxes.get_field('features'), - tf.expand_dims(final_boxes.get_field('scores'), 1)) - - -def BuildModel(layer_name, attention_nonlinear, attention_type, - attention_kernel_size): - """Build the DELF model. - - This function is helpful for constructing the model function which will be fed - to ExtractKeypointDescriptor(). - - Args: - layer_name: the endpoint of feature extraction layer. - attention_nonlinear: Type of the non-linearity for the attention function. - Currently, only 'softplus' is supported. - attention_type: Type of the attention used. Options are: - 'use_l2_normalized_feature' and 'use_default_input_feature'. Note that - this is irrelevant during inference time. - attention_kernel_size: Size of attention kernel (kernel is square). - - Returns: - Attention model function. - """ - - def _ModelFn(images, normalized_image, reuse): - """Attention model to get feature map and attention score map. - - Args: - images: Image tensor. - normalized_image: Whether or not the images are normalized. - reuse: Whether or not the layer and its variables should be reused. - - Returns: - attention: Attention score after the non-linearity. - feature_map: Feature map after ResNet convolution. - """ - if normalized_image: - image_tensor = images - else: - image_tensor = NormalizePixelValues(images) - - # Extract features and attention scores. 
- model = delf_v1.DelfV1(layer_name) - _, attention, _, feature_map, _ = model.GetAttentionPrelogit( - image_tensor, - attention_nonlinear=attention_nonlinear, - attention_type=attention_type, - kernel=[attention_kernel_size, attention_kernel_size], - training_resnet=False, - training_attention=False, - reuse=reuse) - return attention, feature_map - - return _ModelFn - - def ApplyPcaAndWhitening(data, pca_matrix, pca_mean, @@ -345,22 +128,21 @@ def PostProcessDescriptors(descriptors, use_pca, pca_parameters=None): normalization and (possibly) PCA/whitening. """ # L2-normalize, and if desired apply PCA (followed by L2-normalization). - with tf.compat.v1.variable_scope('postprocess'): + final_descriptors = tf.nn.l2_normalize( + descriptors, axis=1, name='l2_normalization') + + if use_pca: + # Apply PCA, and whitening if desired. + final_descriptors = ApplyPcaAndWhitening(final_descriptors, + pca_parameters['matrix'], + pca_parameters['mean'], + pca_parameters['dim'], + pca_parameters['use_whitening'], + pca_parameters['variances']) + + # Re-normalize. final_descriptors = tf.nn.l2_normalize( - descriptors, axis=1, name='l2_normalization') - - if use_pca: - # Apply PCA, and whitening if desired. - final_descriptors = ApplyPcaAndWhitening(final_descriptors, - pca_parameters['matrix'], - pca_parameters['mean'], - pca_parameters['dim'], - pca_parameters['use_whitening'], - pca_parameters['variances']) - - # Re-normalize. - final_descriptors = tf.nn.l2_normalize( - final_descriptors, axis=1, name='pca_l2_normalization') + final_descriptors, axis=1, name='pca_l2_normalization') return final_descriptors diff --git a/research/delf/delf/python/feature_extractor_test.py b/research/delf/delf/python/feature_extractor_test.py index 675ecb8f3..0caa51c43 100644 --- a/research/delf/delf/python/feature_extractor_test.py +++ b/research/delf/delf/python/feature_extractor_test.py @@ -18,7 +18,6 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function -import numpy as np import tensorflow as tf from delf import feature_extractor @@ -34,78 +33,24 @@ class FeatureExtractorTest(tf.test.TestCase): image, pixel_value_offset=5.0, pixel_value_scale=2.0) exp_normalized_image = [[[-1.0, 125.0, -2.5], [14.5, 3.5, 0.0]], [[20.0, 0.0, 30.0], [25.5, 36.0, 42.0]]] - with self.session() as sess: - normalized_image_out = sess.run(normalized_image) - self.assertAllEqual(normalized_image_out, exp_normalized_image) + self.assertAllEqual(normalized_image, exp_normalized_image) def testCalculateReceptiveBoxes(self): boxes = feature_extractor.CalculateReceptiveBoxes( height=1, width=2, rf=291, stride=32, padding=145) exp_boxes = [[-145., -145., 145., 145.], [-145., -113., 145., 177.]] - with self.session() as sess: - boxes_out = sess.run(boxes) - self.assertAllEqual(exp_boxes, boxes_out) + self.assertAllEqual(exp_boxes, boxes) def testCalculateKeypointCenters(self): boxes = [[-10.0, 0.0, 11.0, 21.0], [-2.5, 5.0, 18.5, 26.0], [45.0, -2.5, 66.0, 18.5]] centers = feature_extractor.CalculateKeypointCenters(boxes) - with self.session() as sess: - centers_out = sess.run(centers) exp_centers = [[0.5, 10.5], [8.0, 15.5], [55.5, 8.0]] - self.assertAllEqual(exp_centers, centers_out) - - def testExtractKeypointDescriptor(self): - image = tf.constant( - [[[0, 255, 255], [128, 64, 196]], [[0, 0, 32], [32, 128, 16]]], - dtype=tf.uint8) - - # Arbitrary model function used to test ExtractKeypointDescriptor. 
The - # generated feature_map is a replicated version of the image, concatenated - # with zeros to achieve the required dimensionality. The attention is simply - # the norm of the input image pixels. - def _test_model_fn(image, normalized_image, reuse): - del normalized_image, reuse # Unused variables in the test. - image_shape = tf.shape(image) - attention = tf.squeeze(tf.norm(image, axis=3)) - feature_map = tf.concat([ - tf.tile(image, [1, 1, 1, 341]), - tf.zeros([1, image_shape[1], image_shape[2], 1]) - ], - axis=3) - return attention, feature_map - - boxes, feature_scales, features, scores = ( - feature_extractor.ExtractKeypointDescriptor( - image, - layer_name='resnet_v1_50/block3', - image_scales=tf.constant([1.0]), - iou=1.0, - max_feature_num=10, - abs_thres=1.5, - model_fn=_test_model_fn)) - - exp_boxes = [[-145.0, -145.0, 145.0, 145.0], [-113.0, -145.0, 177.0, 145.0]] - exp_feature_scales = [1.0, 1.0] - exp_features = np.array( - np.concatenate( - (np.tile([[-1.0, 127.0 / 128.0, 127.0 / 128.0], [-1.0, -1.0, -0.75] - ], [1, 341]), np.zeros([2, 1])), - axis=1)) - exp_scores = [[1.723042], [1.600781]] - - with self.session() as sess: - boxes_out, feature_scales_out, features_out, scores_out = sess.run( - [boxes, feature_scales, features, scores]) - - self.assertAllEqual(exp_boxes, boxes_out) - self.assertAllEqual(exp_feature_scales, feature_scales_out) - self.assertAllClose(exp_features, features_out) - self.assertAllClose(exp_scores, scores_out) + self.assertAllEqual(exp_centers, centers) def testPcaWhitening(self): data = tf.constant([[1.0, 2.0, -2.0], [-5.0, 0.0, 3.0], [-1.0, 2.0, 0.0], @@ -123,12 +68,8 @@ class FeatureExtractorTest(tf.test.TestCase): exp_output = [[2.5, -5.0], [-6.0, -2.0], [-0.5, -3.0], [1.0, -2.0]] - with self.session() as sess: - output_out = sess.run(output) - - self.assertAllEqual(exp_output, output_out) + self.assertAllEqual(exp_output, output) if __name__ == '__main__': - tf.compat.v1.disable_eager_execution() tf.test.main() diff --git a/research/delf/delf/python/feature_io_test.py b/research/delf/delf/python/feature_io_test.py index dc8ad75d1..8b68d3b24 100644 --- a/research/delf/delf/python/feature_io_test.py +++ b/research/delf/delf/python/feature_io_test.py @@ -20,11 +20,14 @@ from __future__ import print_function import os +from absl import flags import numpy as np import tensorflow as tf from delf import feature_io +FLAGS = flags.FLAGS + def create_data(): """Creates data to be used in tests. 
@@ -81,8 +84,7 @@ class DelfFeaturesIoTest(tf.test.TestCase): def testWriteAndReadToFile(self): locations, scales, descriptors, attention, orientations = create_data() - tmpdir = tf.compat.v1.test.get_temp_dir() - filename = os.path.join(tmpdir, 'test.delf') + filename = os.path.join(FLAGS.test_tmpdir, 'test.delf') feature_io.WriteToFile(filename, locations, scales, descriptors, attention, orientations) data_read = feature_io.ReadFromFile(filename) @@ -94,8 +96,7 @@ class DelfFeaturesIoTest(tf.test.TestCase): self.assertAllEqual(orientations, data_read[4]) def testWriteAndReadToFileEmptyFile(self): - tmpdir = tf.compat.v1.test.get_temp_dir() - filename = os.path.join(tmpdir, 'test.delf') + filename = os.path.join(FLAGS.test_tmpdir, 'test.delf') feature_io.WriteToFile(filename, np.array([]), np.array([]), np.array([]), np.array([]), np.array([])) data_read = feature_io.ReadFromFile(filename) diff --git a/research/delf/delf/python/training/datasets/googlelandmarks.py b/research/delf/delf/python/training/datasets/googlelandmarks.py index dd0942fc4..f289cc166 100644 --- a/research/delf/delf/python/training/datasets/googlelandmarks.py +++ b/research/delf/delf/python/training/datasets/googlelandmarks.py @@ -27,6 +27,15 @@ import functools import tensorflow as tf +class _GoogleLandmarksInfo(object): + """Metadata about the Google Landmarks dataset.""" + num_classes = { + 'gld_v1': 14951, + 'gld_v2': 203094, + 'gld_v2_clean': 81313 + } + + class _DataAugmentationParams(object): """Default parameters for augmentation.""" # The following are used for training. @@ -167,3 +176,12 @@ def CreateDataset(file_pattern, dataset = dataset.batch(batch_size) return dataset + + +def GoogleLandmarksInfo(): + """Returns metadata information on the Google Landmarks dataset. + + Returns: + object _GoogleLandmarksInfo containing metadata about the GLD dataset. + """ + return _GoogleLandmarksInfo() diff --git a/research/delf/delf/python/training/train.py b/research/delf/delf/python/training/train.py index dcf61b3f3..9b0d0a6cd 100644 --- a/research/delf/delf/python/training/train.py +++ b/research/delf/delf/python/training/train.py @@ -43,6 +43,10 @@ flags.DEFINE_string('train_file_pattern', '/tmp/data/train*', 'File pattern of training dataset files.') flags.DEFINE_string('validation_file_pattern', '/tmp/data/validation*', 'File pattern of validation dataset files.') +flags.DEFINE_enum('dataset_version', 'gld_v1', + ['gld_v1', 'gld_v2', 'gld_v2_clean'], + 'Google Landmarks dataset version, used to determine the' + 'number of classes.') flags.DEFINE_integer('seed', 0, 'Seed to training dataset.') flags.DEFINE_float('initial_lr', 0.001, 'Initial learning rate.') flags.DEFINE_integer('batch_size', 32, 'Global batch size.') @@ -136,9 +140,9 @@ def main(argv): save_interval = 1 report_interval = 1 - # TODO(andrearaujo): Using placeholder, replace with actual value using - # GoogleLandmarksInfo() from datasets/googlelandmarks.py. - num_classes = 14951 + # Determine the number of classes based on the version of the dataset. + gld_info = gld.GoogleLandmarksInfo() + num_classes = gld_info.num_classes[FLAGS.dataset_version] # ------------------------------------------------------------ # Create the distributed train/validation sets. 
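As a rough sketch of how the pieces added above fit together, the class-count lookup introduced in train.py reduces to the snippet below. The import path and alias are assumptions based on the delf package layout; the version names and the 81313 value come from the _GoogleLandmarksInfo metadata added in googlelandmarks.py.

```
# Sketch only: resolving num_classes from the dataset version flag.
from delf.python.training.datasets import googlelandmarks as gld  # assumed import path

dataset_version = 'gld_v2_clean'  # one of 'gld_v1', 'gld_v2', 'gld_v2_clean'
gld_info = gld.GoogleLandmarksInfo()
num_classes = gld_info.num_classes[dataset_version]
print(num_classes)  # 81313 for 'gld_v2_clean', per _GoogleLandmarksInfo above.
```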
-- GitLab From cabb22cd684059e885502468a73b3fe40cadd937 Mon Sep 17 00:00:00 2001 From: Dan Anghel Date: Mon, 15 Jun 2020 17:47:01 -0700 Subject: [PATCH 04/79] Fix to DELF package (#8679) * First version of working script to download the GLDv2 dataset * First version of the DEFL package installation script * First working version of the DELF package installation script * Fixed feedback from PR review * Push to Github of changes to the TFRecord data generation script for DELF. * Merged commit includes the following changes: 315363544 by Andre Araujo: Added the generation of TRAIN and VALIDATE splits from the train dataset. -- 314676530 by Andre Araujo: Updated script to download GLDv2 images for DELF training. -- 314101235 by Andre Araujo: Added newly created module 'utils' to the copybara script. -- 313677085 by Andre Araujo: Code migration from TF1 to TF2 for: - logging (replaced usage of tf.compat.v1.logging.info) - testing directories (replaced usage of tf.compat.v1.test.get_temp_dir()) - feature/object extraction scripts (replaced usage of tf.compat.v1.train.string_input_producer and tf.compat.v1.train.start_queue_runners with PIL) -- 312770828 by Andre Araujo: Internal change. -- PiperOrigin-RevId: 315363544 * First version of the updated README of the DELF training instructions * Added to the README the section describing the generation of the training data * Added warning about the TFRecord generation time * Updated the launch of the training * Minor README update * Integrated review feedback * Merged commit includes the following changes: 315971979 by Andre Araujo: Performance optimization in generating the TRAIN and VALIDATION splits per label. -- 315578370 by Andre Araujo: Tiny fix to char limit in extractor.py. -- 315546242 by Andre Araujo: Script to measure DELG latency. -- 315545801 by Andre Araujo: Pre-load PCA parameters, if using them when extracting DELF/G features. -- 315450392 by Andre Araujo: Code migration from TF1 to TF2 for: - loading the models using in extractor.py and detector.py using tf.saved_model.load - removed tf.compat.v1.Session for the extractor and detector model usage -- 315406342 by Andre Araujo: Internal change. -- PiperOrigin-RevId: 315971979 * Merged commit includes the following changes: 316538447 by Andre Araujo: Read the number of classes from the GLDv2 dataset metadata. 
-- 316416973 by Andre Araujo: Migration of DELF code to TF2: - removed tf.compat.v1.test.get_temp_dir() with FLAGS.test_tmpdir - removed delf_v1.py and its dependencies - removed tf.compat.v1, Session, Graph dependencies from feature_extractor.py, feature_aggregation_extractor.py and aggregation_extraction.py -- PiperOrigin-RevId: 316538447 * Removed reference to delf_v1 Co-authored-by: Andre Araujo --- research/delf/delf/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/research/delf/delf/__init__.py b/research/delf/delf/__init__.py index 234b54f35..a52df3c45 100644 --- a/research/delf/delf/__init__.py +++ b/research/delf/delf/__init__.py @@ -25,7 +25,6 @@ from delf.protos import delf_config_pb2 from delf.protos import feature_pb2 from delf.python import box_io from delf.python import datum_io -from delf.python import delf_v1 from delf.python import feature_aggregation_extractor from delf.python import feature_aggregation_similarity from delf.python import feature_extractor -- GitLab From 802488f1dc363b326437dd05341b8310cfe786c3 Mon Sep 17 00:00:00 2001 From: Hongkun Yu Date: Mon, 15 Jun 2020 19:13:30 -0700 Subject: [PATCH 05/79] Internal change PiperOrigin-RevId: 316593329 --- official/nlp/bert/bert_models.py | 5 +- official/nlp/modeling/layers/README.md | 3 + official/nlp/modeling/layers/__init__.py | 1 + official/nlp/modeling/layers/masked_lm.py | 124 ++++++++++ .../nlp/modeling/layers/masked_lm_test.py | 162 +++++++++++++ ...ed_sparse_categorical_crossentropy_test.py | 11 +- .../nlp/modeling/models/bert_pretrainer.py | 40 +-- .../modeling/models/bert_pretrainer_test.py | 15 +- official/nlp/modeling/networks/README.md | 2 - official/nlp/modeling/networks/__init__.py | 1 - official/nlp/modeling/networks/masked_lm.py | 189 --------------- .../nlp/modeling/networks/masked_lm_test.py | 227 ------------------ 12 files changed, 329 insertions(+), 451 deletions(-) create mode 100644 official/nlp/modeling/layers/masked_lm.py create mode 100644 official/nlp/modeling/layers/masked_lm_test.py delete mode 100644 official/nlp/modeling/networks/masked_lm.py delete mode 100644 official/nlp/modeling/networks/masked_lm_test.py diff --git a/official/nlp/bert/bert_models.py b/official/nlp/bert/bert_models.py index 00a3528e0..e26c2a0ca 100644 --- a/official/nlp/bert/bert_models.py +++ b/official/nlp/bert/bert_models.py @@ -230,9 +230,10 @@ def pretrain_model(bert_config, initializer=initializer, output='predictions') - lm_output, sentence_output = pretrainer_model( + outputs = pretrainer_model( [input_word_ids, input_mask, input_type_ids, masked_lm_positions]) - + lm_output = outputs['masked_lm'] + sentence_output = outputs['classification'] pretrain_loss_layer = BertPretrainLossAndMetricLayer( vocab_size=bert_config.vocab_size) output_loss = pretrain_loss_layer(lm_output, sentence_output, masked_lm_ids, diff --git a/official/nlp/modeling/layers/README.md b/official/nlp/modeling/layers/README.md index a86dcd3fd..212aee22b 100644 --- a/official/nlp/modeling/layers/README.md +++ b/official/nlp/modeling/layers/README.md @@ -45,6 +45,9 @@ assemble new layers, networks, or models. should be masked), the output will have masked positions set to approximately zero. +* [`MaskedLM`](masked_lm.py) implements a masked language model. It assumes the + embedding table variable is passed to it. + * [ClassificationHead](cls_head.py) A pooling head over a sequence of embeddings, commonly used by classification tasks. 
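A minimal usage sketch of the `layers.MaskedLM` head described in the README entry above, assuming a `networks.TransformerEncoder` as the source of the tied embedding table; the sizes are arbitrary and the call signature follows masked_lm.py introduced later in this patch.

```
import tensorflow as tf
from official.nlp.modeling import layers, networks

# Any network exposing get_embedding_table() can supply the tied weights.
encoder = networks.TransformerEncoder(
    vocab_size=100, num_layers=1, sequence_length=32, hidden_size=64,
    num_attention_heads=4)
lm_head = layers.MaskedLM(
    embedding_table=encoder.get_embedding_table(), output='predictions')

sequence_output = tf.keras.Input(shape=(32, 64))   # [batch, seq_length, hidden]
masked_positions = tf.keras.Input(shape=(21,), dtype=tf.int32)
log_probs = lm_head(sequence_output, masked_positions=masked_positions)
# -> shape [batch, 21, vocab_size]: log-probabilities over the vocabulary.
```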
diff --git a/official/nlp/modeling/layers/__init__.py b/official/nlp/modeling/layers/__init__.py index 301ebfcb7..9c89b0b17 100644 --- a/official/nlp/modeling/layers/__init__.py +++ b/official/nlp/modeling/layers/__init__.py @@ -18,6 +18,7 @@ from official.nlp.modeling.layers.attention import * from official.nlp.modeling.layers.cls_head import * from official.nlp.modeling.layers.dense_einsum import DenseEinsum from official.nlp.modeling.layers.gated_feedforward import GatedFeedforward +from official.nlp.modeling.layers.masked_lm import MaskedLM from official.nlp.modeling.layers.masked_softmax import MaskedSoftmax from official.nlp.modeling.layers.on_device_embedding import OnDeviceEmbedding from official.nlp.modeling.layers.position_embedding import PositionEmbedding diff --git a/official/nlp/modeling/layers/masked_lm.py b/official/nlp/modeling/layers/masked_lm.py new file mode 100644 index 000000000..3b81556f4 --- /dev/null +++ b/official/nlp/modeling/layers/masked_lm.py @@ -0,0 +1,124 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Masked language model network.""" +# pylint: disable=g-classes-have-attributes +from __future__ import absolute_import +from __future__ import division +# from __future__ import google_type_annotations +from __future__ import print_function + +import tensorflow as tf + +from official.modeling import tf_utils + + +@tf.keras.utils.register_keras_serializable(package='Text') +class MaskedLM(tf.keras.layers.Layer): + """Masked language model network head for BERT modeling. + + This network implements a masked language model based on the provided network. + It assumes that the network being passed has a "get_embedding_table()" method. + + Arguments: + embedding_table: The embedding table of the targets. + activation: The activation, if any, for the dense layer. + initializer: The intializer for the dense layer. Defaults to a Glorot + uniform initializer. + output: The output style for this network. Can be either 'logits' or + 'predictions'. + """ + + def __init__(self, + embedding_table, + activation=None, + initializer='glorot_uniform', + output='logits', + name='cls/predictions', + **kwargs): + super(MaskedLM, self).__init__(name=name, **kwargs) + self.embedding_table = embedding_table + self.activation = activation + self.initializer = tf.keras.initializers.get(initializer) + + if output not in ('predictions', 'logits'): + raise ValueError( + ('Unknown `output` value "%s". 
`output` can be either "logits" or ' + '"predictions"') % output) + self._output_type = output + + def build(self, input_shape): + self._vocab_size, hidden_size = self.embedding_table.shape + self.dense = tf.keras.layers.Dense( + hidden_size, + activation=self.activation, + kernel_initializer=self.initializer, + name='transform/dense') + self.layer_norm = tf.keras.layers.LayerNormalization( + axis=-1, epsilon=1e-12, name='transform/LayerNorm') + self.bias = self.add_weight( + 'output_bias/bias', + shape=(self._vocab_size,), + initializer='zeros', + trainable=True) + + super(MaskedLM, self).build(input_shape) + + def call(self, sequence_data, masked_positions): + masked_lm_input = self._gather_indexes(sequence_data, masked_positions) + lm_data = self.dense(masked_lm_input) + lm_data = self.layer_norm(lm_data) + lm_data = tf.matmul(lm_data, self.embedding_table, transpose_b=True) + logits = tf.nn.bias_add(lm_data, self.bias) + + masked_positions_shape = tf_utils.get_shape_list( + masked_positions, name='masked_positions_tensor') + logits = tf.reshape(logits, + [-1, masked_positions_shape[1], self._vocab_size]) + if self._output_type == 'logits': + return logits + return tf.nn.log_softmax(logits) + + def get_config(self): + raise NotImplementedError('MaskedLM cannot be directly serialized because ' + 'it has variable sharing logic.') + + def _gather_indexes(self, sequence_tensor, positions): + """Gathers the vectors at the specific positions. + + Args: + sequence_tensor: Sequence output of `BertModel` layer of shape + (`batch_size`, `seq_length`, num_hidden) where num_hidden is number of + hidden units of `BertModel` layer. + positions: Positions ids of tokens in sequence to mask for pretraining + of with dimension (batch_size, num_predictions) where + `num_predictions` is maximum number of tokens to mask out and predict + per each sequence. + + Returns: + Masked out sequence tensor of shape (batch_size * num_predictions, + num_hidden). + """ + sequence_shape = tf_utils.get_shape_list( + sequence_tensor, name='sequence_output_tensor') + batch_size, seq_length, width = sequence_shape + + flat_offsets = tf.reshape( + tf.range(0, batch_size, dtype=tf.int32) * seq_length, [-1, 1]) + flat_positions = tf.reshape(positions + flat_offsets, [-1]) + flat_sequence_tensor = tf.reshape(sequence_tensor, + [batch_size * seq_length, width]) + output_tensor = tf.gather(flat_sequence_tensor, flat_positions) + + return output_tensor diff --git a/official/nlp/modeling/layers/masked_lm_test.py b/official/nlp/modeling/layers/masked_lm_test.py new file mode 100644 index 000000000..12e28ec95 --- /dev/null +++ b/official/nlp/modeling/layers/masked_lm_test.py @@ -0,0 +1,162 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Tests for masked language model network.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import numpy as np +import tensorflow as tf + +from tensorflow.python.keras import keras_parameterized # pylint: disable=g-direct-tensorflow-import + +from official.nlp.modeling.layers import masked_lm +from official.nlp.modeling.networks import transformer_encoder + + +# This decorator runs the test in V1, V2-Eager, and V2-Functional mode. It +# guarantees forward compatibility of this code for the V2 switchover. +@keras_parameterized.run_all_keras_modes +class MaskedLMTest(keras_parameterized.TestCase): + + def create_layer(self, + vocab_size, + sequence_length, + hidden_size, + output='predictions', + xformer_stack=None): + # First, create a transformer stack that we can use to get the LM's + # vocabulary weight. + if xformer_stack is None: + xformer_stack = transformer_encoder.TransformerEncoder( + vocab_size=vocab_size, + num_layers=1, + sequence_length=sequence_length, + hidden_size=hidden_size, + num_attention_heads=4, + ) + + # Create a maskedLM from the transformer stack. + test_layer = masked_lm.MaskedLM( + embedding_table=xformer_stack.get_embedding_table(), + output=output) + return test_layer + + def test_layer_creation(self): + vocab_size = 100 + sequence_length = 32 + hidden_size = 64 + num_predictions = 21 + test_layer = self.create_layer( + vocab_size=vocab_size, + sequence_length=sequence_length, + hidden_size=hidden_size) + + # Make sure that the output tensor of the masked LM is the right shape. + lm_input_tensor = tf.keras.Input(shape=(sequence_length, hidden_size)) + masked_positions = tf.keras.Input(shape=(num_predictions,), dtype=tf.int32) + output = test_layer(lm_input_tensor, masked_positions=masked_positions) + + expected_output_shape = [None, num_predictions, vocab_size] + self.assertEqual(expected_output_shape, output.shape.as_list()) + + def test_layer_invocation_with_external_logits(self): + vocab_size = 100 + sequence_length = 32 + hidden_size = 64 + num_predictions = 21 + xformer_stack = transformer_encoder.TransformerEncoder( + vocab_size=vocab_size, + num_layers=1, + sequence_length=sequence_length, + hidden_size=hidden_size, + num_attention_heads=4, + ) + test_layer = self.create_layer( + vocab_size=vocab_size, + sequence_length=sequence_length, + hidden_size=hidden_size, + xformer_stack=xformer_stack, + output='predictions') + logit_layer = self.create_layer( + vocab_size=vocab_size, + sequence_length=sequence_length, + hidden_size=hidden_size, + xformer_stack=xformer_stack, + output='logits') + + # Create a model from the masked LM layer. + lm_input_tensor = tf.keras.Input(shape=(sequence_length, hidden_size)) + masked_positions = tf.keras.Input(shape=(num_predictions,), dtype=tf.int32) + output = test_layer(lm_input_tensor, masked_positions) + logit_output = logit_layer(lm_input_tensor, masked_positions) + logit_output = tf.keras.layers.Activation(tf.nn.log_softmax)(logit_output) + logit_layer.set_weights(test_layer.get_weights()) + model = tf.keras.Model([lm_input_tensor, masked_positions], output) + logits_model = tf.keras.Model(([lm_input_tensor, masked_positions]), + logit_output) + + # Invoke the masked LM on some fake data to make sure there are no runtime + # errors in the code. 
+ batch_size = 3 + lm_input_data = 10 * np.random.random_sample( + (batch_size, sequence_length, hidden_size)) + masked_position_data = np.random.randint( + sequence_length, size=(batch_size, num_predictions)) + # ref_outputs = model.predict([lm_input_data, masked_position_data]) + # outputs = logits_model.predict([lm_input_data, masked_position_data]) + ref_outputs = model([lm_input_data, masked_position_data]) + outputs = logits_model([lm_input_data, masked_position_data]) + + # Ensure that the tensor shapes are correct. + expected_output_shape = (batch_size, num_predictions, vocab_size) + self.assertEqual(expected_output_shape, ref_outputs.shape) + self.assertEqual(expected_output_shape, outputs.shape) + self.assertAllClose(ref_outputs, outputs) + + def test_layer_invocation(self): + vocab_size = 100 + sequence_length = 32 + hidden_size = 64 + num_predictions = 21 + test_layer = self.create_layer( + vocab_size=vocab_size, + sequence_length=sequence_length, + hidden_size=hidden_size) + + # Create a model from the masked LM layer. + lm_input_tensor = tf.keras.Input(shape=(sequence_length, hidden_size)) + masked_positions = tf.keras.Input(shape=(num_predictions,), dtype=tf.int32) + output = test_layer(lm_input_tensor, masked_positions) + model = tf.keras.Model([lm_input_tensor, masked_positions], output) + + # Invoke the masked LM on some fake data to make sure there are no runtime + # errors in the code. + batch_size = 3 + lm_input_data = 10 * np.random.random_sample( + (batch_size, sequence_length, hidden_size)) + masked_position_data = np.random.randint( + 2, size=(batch_size, num_predictions)) + _ = model.predict([lm_input_data, masked_position_data]) + + def test_unknown_output_type_fails(self): + with self.assertRaisesRegex(ValueError, 'Unknown `output` value "bad".*'): + _ = self.create_layer( + vocab_size=8, sequence_length=8, hidden_size=8, output='bad') + + +if __name__ == '__main__': + tf.test.main() diff --git a/official/nlp/modeling/losses/weighted_sparse_categorical_crossentropy_test.py b/official/nlp/modeling/losses/weighted_sparse_categorical_crossentropy_test.py index deb4d1207..2fec2a318 100644 --- a/official/nlp/modeling/losses/weighted_sparse_categorical_crossentropy_test.py +++ b/official/nlp/modeling/losses/weighted_sparse_categorical_crossentropy_test.py @@ -23,6 +23,7 @@ import numpy as np import tensorflow as tf from tensorflow.python.keras import keras_parameterized # pylint: disable=g-direct-tensorflow-import +from official.nlp.modeling import layers from official.nlp.modeling import networks from official.nlp.modeling.losses import weighted_sparse_categorical_crossentropy @@ -48,20 +49,18 @@ class ClassificationLossTest(keras_parameterized.TestCase): word_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32) mask = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32) type_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32) - lm_outputs, _ = xformer_stack([word_ids, mask, type_ids]) + _ = xformer_stack([word_ids, mask, type_ids]) # Create a maskedLM from the transformer stack. - test_network = networks.MaskedLM( - num_predictions=num_predictions, - input_width=lm_outputs.shape[-1], - source_network=xformer_stack, + test_layer = layers.MaskedLM( + embedding_table=xformer_stack.get_embedding_table(), output=output) # Create a model from the masked LM layer. 
lm_input_tensor = tf.keras.Input(shape=(sequence_length, hidden_size)) masked_lm_positions = tf.keras.Input( shape=(num_predictions,), dtype=tf.int32) - output = test_network([lm_input_tensor, masked_lm_positions]) + output = test_layer(lm_input_tensor, masked_positions=masked_lm_positions) return tf.keras.Model([lm_input_tensor, masked_lm_positions], output) def create_classification_model(self, input_width, num_classes): diff --git a/official/nlp/modeling/models/bert_pretrainer.py b/official/nlp/modeling/models/bert_pretrainer.py index 32e9fc671..bce33747f 100644 --- a/official/nlp/modeling/models/bert_pretrainer.py +++ b/official/nlp/modeling/models/bert_pretrainer.py @@ -25,6 +25,7 @@ from typing import List, Optional import gin import tensorflow as tf +from official.nlp.modeling import layers from official.nlp.modeling import networks @@ -47,8 +48,8 @@ class BertPretrainer(tf.keras.Model): num_token_predictions: Number of tokens to predict from the masked LM. embedding_table: Embedding table of a network. If None, the "network.get_embedding_table()" is used. - activation: The activation (if any) to use in the masked LM network. - If None, no activation will be used. + activation: The activation (if any) to use in the masked LM network. If + None, no activation will be used. initializer: The initializer (if any) to use in the masked LM and classification networks. Defaults to a Glorot uniform initializer. output: The output style for this network. Can be either 'logits' or @@ -106,16 +107,16 @@ class BertPretrainer(tf.keras.Model): dtype=tf.int32) inputs.append(masked_lm_positions) - self.masked_lm = networks.MaskedLM( - num_predictions=num_token_predictions, - input_width=sequence_output.shape[-1], - source_network=network, + if embedding_table is None: + embedding_table = self.encoder.get_embedding_table() + self.masked_lm = layers.MaskedLM( embedding_table=embedding_table, activation=activation, initializer=initializer, output=output, - name='masked_lm') - lm_outputs = self.masked_lm([sequence_output, masked_lm_positions]) + name='cls/predictions') + lm_outputs = self.masked_lm( + sequence_output, masked_positions=masked_lm_positions) self.classification = networks.Classification( input_width=cls_output.shape[-1], @@ -126,7 +127,9 @@ class BertPretrainer(tf.keras.Model): sentence_outputs = self.classification(cls_output) super(BertPretrainer, self).__init__( - inputs=inputs, outputs=[lm_outputs, sentence_outputs], **kwargs) + inputs=inputs, + outputs=dict(masked_lm=lm_outputs, classification=sentence_outputs), + **kwargs) def get_config(self): return self._config @@ -151,8 +154,8 @@ class BertPretrainerV2(tf.keras.Model): num_masked_tokens: Number of tokens to predict from the masked LM. encoder_network: A transformer network. This network should output a sequence output and a classification output. - mlm_activation: The activation (if any) to use in the masked LM network. - If None, no activation will be used. + mlm_activation: The activation (if any) to use in the masked LM network. If + None, no activation will be used. mlm_initializer: The initializer (if any) to use in the masked LM. Default to a Glorot uniform initializer. 
classification_heads: A list of optional head layers to transform on encoder @@ -193,17 +196,18 @@ class BertPretrainerV2(tf.keras.Model): outputs = dict() if num_masked_tokens > 0: - self.masked_lm = networks.MaskedLM( - num_predictions=num_masked_tokens, - input_width=sequence_output.shape[-1], - source_network=self.encoder_network, + self.masked_lm = layers.MaskedLM( + embedding_table=self.encoder_network.get_embedding_table(), activation=mlm_activation, initializer=mlm_initializer, - name='masked_lm') - masked_lm_positions = copy.copy(self.masked_lm.inputs[-1]) + name='cls/predictions') + masked_lm_positions = tf.keras.layers.Input( + shape=(num_masked_tokens,), + name='masked_lm_positions', + dtype=tf.int32) inputs.append(masked_lm_positions) outputs['lm_output'] = self.masked_lm( - [sequence_output, masked_lm_positions]) + sequence_output, masked_positions=masked_lm_positions) for cls_head in self.classification_heads: outputs[cls_head.name] = cls_head(sequence_output) diff --git a/official/nlp/modeling/models/bert_pretrainer_test.py b/official/nlp/modeling/models/bert_pretrainer_test.py index 6828c52b9..eb9ace5cc 100644 --- a/official/nlp/modeling/models/bert_pretrainer_test.py +++ b/official/nlp/modeling/models/bert_pretrainer_test.py @@ -50,16 +50,19 @@ class BertPretrainerTest(keras_parameterized.TestCase): word_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32) mask = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32) type_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32) - lm_mask = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32) + masked_lm_positions = tf.keras.Input( + shape=(num_token_predictions,), dtype=tf.int32) # Invoke the trainer model on the inputs. This causes the layer to be built. - lm_outs, cls_outs = bert_trainer_model([word_ids, mask, type_ids, lm_mask]) + outputs = bert_trainer_model( + [word_ids, mask, type_ids, masked_lm_positions]) # Validate that the outputs are of the expected shape. expected_lm_shape = [None, num_token_predictions, vocab_size] expected_classification_shape = [None, num_classes] - self.assertAllEqual(expected_lm_shape, lm_outs.shape.as_list()) - self.assertAllEqual(expected_classification_shape, cls_outs.shape.as_list()) + self.assertAllEqual(expected_lm_shape, outputs['masked_lm'].shape.as_list()) + self.assertAllEqual(expected_classification_shape, + outputs['classification'].shape.as_list()) def test_bert_trainer_tensor_call(self): """Validate that the Keras object can be invoked.""" @@ -81,7 +84,7 @@ class BertPretrainerTest(keras_parameterized.TestCase): # Invoke the trainer model on the tensors. In Eager mode, this does the # actual calculation. (We can't validate the outputs, since the network is # too complex: this simply ensures we're not hitting runtime errors.) - _, _ = bert_trainer_model([word_ids, mask, type_ids, lm_mask]) + _ = bert_trainer_model([word_ids, mask, type_ids, lm_mask]) def test_serialize_deserialize(self): """Validate that the BERT trainer can be serialized and deserialized.""" @@ -123,7 +126,7 @@ class BertPretrainerTest(keras_parameterized.TestCase): word_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32) mask = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32) type_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32) - lm_mask = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32) + lm_mask = tf.keras.Input(shape=(num_token_predictions,), dtype=tf.int32) # Invoke the trainer model on the inputs. This causes the layer to be built. 
outputs = bert_trainer_model([word_ids, mask, type_ids, lm_mask]) diff --git a/official/nlp/modeling/networks/README.md b/official/nlp/modeling/networks/README.md index d8e93e26f..42347373e 100644 --- a/official/nlp/modeling/networks/README.md +++ b/official/nlp/modeling/networks/README.md @@ -16,8 +16,6 @@ Self-supervised Learning of Language Representations] (https://arxiv.org/abs/1909.11942). Compared with [BERT](https://arxiv.org/abs/1810.04805), ALBERT refactorizes embedding parameters into two smaller matrices and shares parameters across layers. -* [`MaskedLM`](masked_lm.py) implements a masked language model for BERT pretraining. It assumes that the network being passed has a `get_embedding_table()` method. - * [`Classification`](classification.py) contains a single hidden layer, and is intended for use as a classification or regression (if number of classes is set to 1) head. diff --git a/official/nlp/modeling/networks/__init__.py b/official/nlp/modeling/networks/__init__.py index 9d4826d46..b8443e9f9 100644 --- a/official/nlp/modeling/networks/__init__.py +++ b/official/nlp/modeling/networks/__init__.py @@ -16,7 +16,6 @@ from official.nlp.modeling.networks.albert_transformer_encoder import AlbertTransformerEncoder from official.nlp.modeling.networks.classification import Classification from official.nlp.modeling.networks.encoder_scaffold import EncoderScaffold -from official.nlp.modeling.networks.masked_lm import MaskedLM from official.nlp.modeling.networks.span_labeling import SpanLabeling from official.nlp.modeling.networks.token_classification import TokenClassification from official.nlp.modeling.networks.transformer_encoder import TransformerEncoder diff --git a/official/nlp/modeling/networks/masked_lm.py b/official/nlp/modeling/networks/masked_lm.py deleted file mode 100644 index 19df018cd..000000000 --- a/official/nlp/modeling/networks/masked_lm.py +++ /dev/null @@ -1,189 +0,0 @@ -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Masked language model network.""" -# pylint: disable=g-classes-have-attributes -from __future__ import absolute_import -from __future__ import division -# from __future__ import google_type_annotations -from __future__ import print_function - -import tensorflow as tf - -from official.modeling import tf_utils - - -@tf.keras.utils.register_keras_serializable(package='Text') -class MaskedLM(tf.keras.Model): - """Masked language model network head for BERT modeling. - - This network implements a masked language model based on the provided network. - It assumes that the network being passed has a "get_embedding_table()" method. - - Arguments: - input_width: The innermost dimension of the input tensor to this network. - num_predictions: The number of predictions to make per sequence. - source_network: The network with the embedding layer to use for the - embedding layer. 
- embedding_table: The embedding table of a source network, If None, the - `source_network.get_embedding_table()` method is used. - activation: The activation, if any, for the dense layer in this network. - initializer: The intializer for the dense layer in this network. Defaults to - a Glorot uniform initializer. - output: The output style for this network. Can be either 'logits' or - 'predictions'. - """ - - def __init__(self, - input_width, - num_predictions, - source_network, - embedding_table=None, - activation=None, - initializer='glorot_uniform', - output='logits', - **kwargs): - - if embedding_table is None: - embedding_table = source_network.get_embedding_table() - vocab_size, hidden_size = embedding_table.shape - - sequence_data = tf.keras.layers.Input( - shape=(None, input_width), name='sequence_data', dtype=tf.float32) - masked_lm_positions = tf.keras.layers.Input( - shape=(num_predictions,), name='masked_lm_positions', dtype=tf.int32) - - masked_lm_input = tf.keras.layers.Lambda( - lambda x: self._gather_indexes(x[0], x[1]))( - [sequence_data, masked_lm_positions]) - lm_data = ( - tf.keras.layers.Dense( - hidden_size, - activation=activation, - kernel_initializer=initializer, - name='cls/predictions/transform/dense')(masked_lm_input)) - lm_data = tf.keras.layers.LayerNormalization( - axis=-1, epsilon=1e-12, name='cls/predictions/transform/LayerNorm')( - lm_data) - lm_data = tf.keras.layers.Lambda( - lambda x: tf.matmul(x, embedding_table, transpose_b=True))( - lm_data) - logits = Bias( - initializer=tf.keras.initializers.Zeros(), - name='cls/predictions/output_bias')( - lm_data) - - # We can't use the standard Keras reshape layer here, since it expects - # the input and output batch size to be the same. - reshape_layer = tf.keras.layers.Lambda( - lambda x: tf.reshape(x, [-1, num_predictions, vocab_size])) - - self.logits = reshape_layer(logits) - predictions = tf.keras.layers.Activation(tf.nn.log_softmax)(self.logits) - - if output == 'logits': - output_tensors = self.logits - elif output == 'predictions': - output_tensors = predictions - else: - raise ValueError( - ('Unknown `output` value "%s". `output` can be either "logits" or ' - '"predictions"') % output) - - super(MaskedLM, self).__init__( - inputs=[sequence_data, masked_lm_positions], - outputs=output_tensors, - **kwargs) - - def get_config(self): - raise NotImplementedError('MaskedLM cannot be directly serialized at this ' - 'time. Please use it only in Layers or ' - 'functionally subclassed Models/Networks.') - - def _gather_indexes(self, sequence_tensor, positions): - """Gathers the vectors at the specific positions. - - Args: - sequence_tensor: Sequence output of `BertModel` layer of shape - (`batch_size`, `seq_length`, num_hidden) where num_hidden is number of - hidden units of `BertModel` layer. - positions: Positions ids of tokens in sequence to mask for pretraining - of with dimension (batch_size, num_predictions) where - `num_predictions` is maximum number of tokens to mask out and predict - per each sequence. - - Returns: - Masked out sequence tensor of shape (batch_size * num_predictions, - num_hidden). 
- """ - sequence_shape = tf_utils.get_shape_list( - sequence_tensor, name='sequence_output_tensor') - batch_size, seq_length, width = sequence_shape - - flat_offsets = tf.reshape( - tf.range(0, batch_size, dtype=tf.int32) * seq_length, [-1, 1]) - flat_positions = tf.reshape(positions + flat_offsets, [-1]) - flat_sequence_tensor = tf.reshape(sequence_tensor, - [batch_size * seq_length, width]) - output_tensor = tf.gather(flat_sequence_tensor, flat_positions) - - return output_tensor - - -@tf.keras.utils.register_keras_serializable(package='Text') -# Temporary until we can create a Dense layer that ties the embedding. -class Bias(tf.keras.layers.Layer): - """Adds a bias term to an input.""" - - def __init__(self, - initializer='zeros', - regularizer=None, - constraint=None, - activation=None, - **kwargs): - super(Bias, self).__init__(**kwargs) - self._initializer = tf.keras.initializers.get(initializer) - self._regularizer = tf.keras.regularizers.get(regularizer) - self._constraint = tf.keras.constraints.get(constraint) - self._activation = tf.keras.activations.get(activation) - - def build(self, input_shape): - input_shape = tf.TensorShape(input_shape) - self._bias = self.add_weight( - 'bias', - shape=input_shape[1:], - initializer=self._initializer, - regularizer=self._regularizer, - constraint=self._constraint, - dtype=self._dtype, - trainable=True) - - super(Bias, self).build(input_shape) - - def get_config(self): - config = { - 'activation': tf.keras.activations.serialize(self._activation), - 'initializer': tf.keras.initializers.serialize(self._initializer), - 'regularizer': tf.keras.regularizers.serialize(self._regularizer), - 'constraint': tf.keras.constraints.serialize(self._constraint) - } - base_config = super(Bias, self).get_config() - return dict(list(base_config.items()) + list(config.items())) - - def call(self, inputs): - outputs = tf.nn.bias_add(inputs, self._bias) - if self._activation is not None: - return self._activation(outputs) # pylint: disable=not-callable - else: - return outputs diff --git a/official/nlp/modeling/networks/masked_lm_test.py b/official/nlp/modeling/networks/masked_lm_test.py deleted file mode 100644 index 2b7b382cc..000000000 --- a/official/nlp/modeling/networks/masked_lm_test.py +++ /dev/null @@ -1,227 +0,0 @@ -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Tests for masked language model network.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import numpy as np -import tensorflow as tf - -from tensorflow.python.keras import keras_parameterized # pylint: disable=g-direct-tensorflow-import - -from official.nlp.modeling.networks import masked_lm -from official.nlp.modeling.networks import transformer_encoder - - -# This decorator runs the test in V1, V2-Eager, and V2-Functional mode. 
It -# guarantees forward compatibility of this code for the V2 switchover. -@keras_parameterized.run_all_keras_modes -class MaskedLMTest(keras_parameterized.TestCase): - - def create_network(self, - vocab_size, - sequence_length, - hidden_size, - num_predictions, - output='predictions', - xformer_stack=None): - # First, create a transformer stack that we can use to get the LM's - # vocabulary weight. - if xformer_stack is None: - xformer_stack = transformer_encoder.TransformerEncoder( - vocab_size=vocab_size, - num_layers=1, - sequence_length=sequence_length, - hidden_size=hidden_size, - num_attention_heads=4, - ) - word_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32) - mask = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32) - type_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32) - lm_outputs, _ = xformer_stack([word_ids, mask, type_ids]) - - # Create a maskedLM from the transformer stack. - test_network = masked_lm.MaskedLM( - num_predictions=num_predictions, - input_width=lm_outputs.shape[-1], - source_network=xformer_stack, - output=output) - return test_network - - def test_network_creation(self): - vocab_size = 100 - sequence_length = 32 - hidden_size = 64 - num_predictions = 21 - test_network = self.create_network( - vocab_size=vocab_size, - sequence_length=sequence_length, - hidden_size=hidden_size, - num_predictions=num_predictions) - - # Make sure that the output tensor of the masked LM is the right shape. - lm_input_tensor = tf.keras.Input(shape=(sequence_length, hidden_size)) - masked_lm_positions = tf.keras.Input( - shape=(num_predictions,), dtype=tf.int32) - output = test_network([lm_input_tensor, masked_lm_positions]) - - expected_output_shape = [None, num_predictions, vocab_size] - self.assertEqual(expected_output_shape, output.shape.as_list()) - - def test_network_invocation_with_internal_logits(self): - vocab_size = 100 - sequence_length = 32 - hidden_size = 64 - num_predictions = 21 - test_network = self.create_network( - vocab_size=vocab_size, - sequence_length=sequence_length, - hidden_size=hidden_size, - num_predictions=num_predictions) - - # Create a model from the masked LM layer. - lm_input_tensor = tf.keras.Input(shape=(sequence_length, hidden_size)) - masked_lm_positions = tf.keras.Input( - shape=(num_predictions,), dtype=tf.int32) - output = test_network([lm_input_tensor, masked_lm_positions]) - model = tf.keras.Model([lm_input_tensor, masked_lm_positions], output) - logits_model = tf.keras.Model(test_network.inputs, test_network.logits) - - # Invoke the masked LM on some fake data to make sure there are no runtime - # errors in the code. - batch_size = 3 - lm_input_data = 10 * np.random.random_sample( - (batch_size, sequence_length, hidden_size)) - masked_position_data = np.random.randint( - 2, size=(batch_size, num_predictions)) - outputs = model.predict([lm_input_data, masked_position_data]) - logits = logits_model.predict([lm_input_data, masked_position_data]) - - # Ensure that the tensor shapes are correct. - expected_output_shape = (batch_size, num_predictions, vocab_size) - self.assertEqual(expected_output_shape, outputs.shape) - self.assertEqual(expected_output_shape, logits.shape) - - # Ensure that the logits, when softmaxed, create the outputs. 
- input_tensor = tf.keras.Input(expected_output_shape[1:]) - output_tensor = tf.keras.layers.Activation(tf.nn.log_softmax)(input_tensor) - softmax_model = tf.keras.Model(input_tensor, output_tensor) - - calculated_softmax = softmax_model.predict(logits) - self.assertAllClose(outputs, calculated_softmax) - - def test_network_invocation_with_external_logits(self): - vocab_size = 100 - sequence_length = 32 - hidden_size = 64 - num_predictions = 21 - xformer_stack = transformer_encoder.TransformerEncoder( - vocab_size=vocab_size, - num_layers=1, - sequence_length=sequence_length, - hidden_size=hidden_size, - num_attention_heads=4, - ) - test_network = self.create_network( - vocab_size=vocab_size, - sequence_length=sequence_length, - hidden_size=hidden_size, - num_predictions=num_predictions, - xformer_stack=xformer_stack, - output='predictions') - logit_network = self.create_network( - vocab_size=vocab_size, - sequence_length=sequence_length, - hidden_size=hidden_size, - num_predictions=num_predictions, - xformer_stack=xformer_stack, - output='logits') - logit_network.set_weights(test_network.get_weights()) - - # Create a model from the masked LM layer. - lm_input_tensor = tf.keras.Input(shape=(sequence_length, hidden_size)) - masked_lm_positions = tf.keras.Input( - shape=(num_predictions,), dtype=tf.int32) - output = test_network([lm_input_tensor, masked_lm_positions]) - logit_output = logit_network([lm_input_tensor, masked_lm_positions]) - - model = tf.keras.Model([lm_input_tensor, masked_lm_positions], output) - logits_model = tf.keras.Model(([lm_input_tensor, masked_lm_positions]), - logit_output) - - # Invoke the masked LM on some fake data to make sure there are no runtime - # errors in the code. - batch_size = 3 - lm_input_data = 10 * np.random.random_sample( - (batch_size, sequence_length, hidden_size)) - masked_position_data = np.random.randint( - 2, size=(batch_size, num_predictions)) - outputs = model.predict([lm_input_data, masked_position_data]) - logits = logits_model.predict([lm_input_data, masked_position_data]) - - # Ensure that the tensor shapes are correct. - expected_output_shape = (batch_size, num_predictions, vocab_size) - self.assertEqual(expected_output_shape, outputs.shape) - self.assertEqual(expected_output_shape, logits.shape) - - # Ensure that the logits, when softmaxed, create the outputs. - input_tensor = tf.keras.Input(expected_output_shape[1:]) - output_tensor = tf.keras.layers.Activation(tf.nn.log_softmax)(input_tensor) - softmax_model = tf.keras.Model(input_tensor, output_tensor) - - calculated_softmax = softmax_model.predict(logits) - self.assertAllClose(outputs, calculated_softmax) - - def test_network_invocation(self): - vocab_size = 100 - sequence_length = 32 - hidden_size = 64 - num_predictions = 21 - test_network = self.create_network( - vocab_size=vocab_size, - sequence_length=sequence_length, - hidden_size=hidden_size, - num_predictions=num_predictions) - - # Create a model from the masked LM layer. - lm_input_tensor = tf.keras.Input(shape=(sequence_length, hidden_size)) - masked_lm_positions = tf.keras.Input( - shape=(num_predictions,), dtype=tf.int32) - output = test_network([lm_input_tensor, masked_lm_positions]) - model = tf.keras.Model([lm_input_tensor, masked_lm_positions], output) - - # Invoke the masked LM on some fake data to make sure there are no runtime - # errors in the code. 
- batch_size = 3 - lm_input_data = 10 * np.random.random_sample( - (batch_size, sequence_length, hidden_size)) - masked_position_data = np.random.randint( - 2, size=(batch_size, num_predictions)) - _ = model.predict([lm_input_data, masked_position_data]) - - def test_unknown_output_type_fails(self): - with self.assertRaisesRegex(ValueError, 'Unknown `output` value "bad".*'): - _ = self.create_network( - vocab_size=8, - sequence_length=8, - hidden_size=8, - num_predictions=8, - output='bad') - - -if __name__ == '__main__': - tf.test.main() -- GitLab From 166f887c07c0c4b3d0ba3329a93f1b9bc083a54a Mon Sep 17 00:00:00 2001 From: Hongkun Yu Date: Tue, 16 Jun 2020 10:46:00 -0700 Subject: [PATCH 06/79] Make the base task as metaclass and decorate methods not implemented. PiperOrigin-RevId: 316712226 --- official/core/base_task.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/official/core/base_task.py b/official/core/base_task.py index 3dff9e087..f5dfdd4f5 100644 --- a/official/core/base_task.py +++ b/official/core/base_task.py @@ -14,15 +14,18 @@ # limitations under the License. # ============================================================================== """Defines the base task abstraction.""" +import abc import functools from typing import Any, Callable, Optional +import six import tensorflow as tf from official.modeling.hyperparams import config_definitions as cfg from official.utils import registry +@six.add_metaclass(abc.ABCMeta) class Task(tf.Module): """A single-replica view of training procedure. @@ -54,14 +57,13 @@ class Task(tf.Module): """ pass + @abc.abstractmethod def build_model(self) -> tf.keras.Model: """Creates the model architecture. Returns: A model instance. """ - # TODO(hongkuny): the base task should call network factory. - pass def compile_model(self, model: tf.keras.Model, @@ -98,6 +100,7 @@ class Task(tf.Module): model.test_step = functools.partial(validation_step, model=model) return model + @abc.abstractmethod def build_inputs(self, params: cfg.DataConfig, input_context: Optional[tf.distribute.InputContext] = None): @@ -112,7 +115,6 @@ class Task(tf.Module): Returns: A nested structure of per-replica input functions. """ - pass def build_losses(self, labels, model_outputs, aux_losses=None) -> tf.Tensor: """Standard interface to compute losses. -- GitLab From c64cb01b4d957bccd9f47aea6f1ac8e9ffa05236 Mon Sep 17 00:00:00 2001 From: Andrew Audibert Date: Tue, 16 Jun 2020 10:51:57 -0700 Subject: [PATCH 07/79] Add tf_data_service option to ResNet model. Tested by running the model on TPU with a tf.data service running in GKE. PiperOrigin-RevId: 316713637 --- .../image_classification/dataset_factory.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/official/vision/image_classification/dataset_factory.py b/official/vision/image_classification/dataset_factory.py index 89abb00eb..e9dad1268 100644 --- a/official/vision/image_classification/dataset_factory.py +++ b/official/vision/image_classification/dataset_factory.py @@ -100,6 +100,9 @@ class DatasetConfig(base_config.Config): skip_decoding: Whether to skip image decoding when loading from TFDS. cache: whether to cache to dataset examples. Can be used to avoid re-reading from disk on the second epoch. Requires significant memory overhead. + tf_data_service: The URI of a tf.data service to offload preprocessing onto + during training. The URI should be in the format "protocol://address", + e.g. "grpc://tf-data-service:5050". 
mean_subtract: whether or not to apply mean subtraction to the dataset. standardize: whether or not to apply standardization to the dataset. """ @@ -123,6 +126,7 @@ class DatasetConfig(base_config.Config): file_shuffle_buffer_size: int = 1024 skip_decoding: bool = True cache: bool = False + tf_data_service: Optional[str] = None mean_subtract: bool = False standardize: bool = False @@ -449,6 +453,18 @@ class DatasetBuilder: # Prefetch overlaps in-feed with training dataset = dataset.prefetch(tf.data.experimental.AUTOTUNE) + if self.config.tf_data_service: + if not hasattr(tf.data.experimental, 'service'): + raise ValueError('The tf_data_service flag requires Tensorflow version ' + '>= 2.3.0, but the version is {}'.format( + tf.__version__)) + dataset = dataset.apply( + tf.data.experimental.service.distribute( + processing_mode='parallel_epochs', + service=self.config.tf_data_service, + job_name='resnet_train')) + dataset = dataset.prefetch(buffer_size=tf.data.experimental.AUTOTUNE) + return dataset def parse_record(self, record: tf.Tensor) -> Tuple[tf.Tensor, tf.Tensor]: -- GitLab From 85cfe94da4ca05cdfd6179ea81c0740b0843c62d Mon Sep 17 00:00:00 2001 From: Tianqi Liu Date: Tue, 16 Jun 2020 12:28:39 -0700 Subject: [PATCH 08/79] Internal cleanup. PiperOrigin-RevId: 316734574 --- official/nlp/bert/model_training_utils.py | 69 ++++++++++--------- .../nlp/bert/model_training_utils_test.py | 10 +-- 2 files changed, 44 insertions(+), 35 deletions(-) diff --git a/official/nlp/bert/model_training_utils.py b/official/nlp/bert/model_training_utils.py index a0c15e4d1..7c72f6d76 100644 --- a/official/nlp/bert/model_training_utils.py +++ b/official/nlp/bert/model_training_utils.py @@ -111,6 +111,7 @@ def run_customized_training_loop( model_dir=None, train_input_fn=None, steps_per_epoch=None, + num_eval_per_epoch=1, steps_per_loop=None, epochs=1, eval_input_fn=None, @@ -144,6 +145,7 @@ def run_customized_training_loop( steps_per_epoch: Number of steps to run per epoch. At the end of each epoch, model checkpoint will be saved and evaluation will be conducted if evaluation dataset is provided. + num_eval_per_epoch: Number of evaluations per epoch. steps_per_loop: Number of steps per graph-mode loop. In order to reduce communication in eager context, training logs are printed every steps_per_loop. @@ -166,8 +168,8 @@ def run_customized_training_loop( sub_model_export_name: If not None, will export `sub_model` returned by `model_fn` into checkpoint files. The name of intermediate checkpoint file is {sub_model_export_name}_step_{step}.ckpt and the last - checkpint's name is {sub_model_export_name}.ckpt; - if None, `sub_model` will not be exported as checkpoint. + checkpint's name is {sub_model_export_name}.ckpt; if None, `sub_model` + will not be exported as checkpoint. explicit_allreduce: Whether to explicitly perform gradient allreduce, instead of relying on implicit allreduce in optimizer.apply_gradients(). default is False. For now, if training using FP16 mixed precision, @@ -177,10 +179,10 @@ def run_customized_training_loop( pre_allreduce_callbacks: A list of callback functions that takes gradients and model variables pairs as input, manipulate them, and returns a new gradients and model variables paris. The callback functions will be - invoked in the list order and before gradients are allreduced. - With mixed precision training, the pre_allreduce_allbacks will be - applied on scaled_gradients. Default is no callbacks. - Only used when explicit_allreduce=True. 
+ invoked in the list order and before gradients are allreduced. With + mixed precision training, the pre_allreduce_allbacks will be applied on + scaled_gradients. Default is no callbacks. Only used when + explicit_allreduce=True. post_allreduce_callbacks: A list of callback functions that takes gradients and model variables pairs as input, manipulate them, and returns a new gradients and model variables paris. The callback @@ -208,6 +210,8 @@ def run_customized_training_loop( required_arguments = [ strategy, model_fn, loss_fn, model_dir, steps_per_epoch, train_input_fn ] + + steps_between_evals = int(steps_per_epoch / num_eval_per_epoch) if [arg for arg in required_arguments if arg is None]: raise ValueError('`strategy`, `model_fn`, `loss_fn`, `model_dir`, ' '`steps_per_epoch` and `train_input_fn` are required ' @@ -216,17 +220,17 @@ def run_customized_training_loop( if tf.config.list_logical_devices('TPU'): # One can't fully utilize a TPU with steps_per_loop=1, so in this case # default users to a more useful value. - steps_per_loop = min(1000, steps_per_epoch) + steps_per_loop = min(1000, steps_between_evals) else: steps_per_loop = 1 logging.info('steps_per_loop not specified. Using steps_per_loop=%d', steps_per_loop) - if steps_per_loop > steps_per_epoch: + if steps_per_loop > steps_between_evals: logging.warning( 'steps_per_loop: %d is specified to be greater than ' - ' steps_per_epoch: %d, we will use steps_per_epoch as' - ' steps_per_loop.', steps_per_loop, steps_per_epoch) - steps_per_loop = steps_per_epoch + ' steps_between_evals: %d, we will use steps_between_evals as' + ' steps_per_loop.', steps_per_loop, steps_between_evals) + steps_per_loop = steps_between_evals assert tf.executing_eagerly() if run_eagerly: @@ -246,8 +250,7 @@ def run_customized_training_loop( total_training_steps = steps_per_epoch * epochs train_iterator = _get_input_iterator(train_input_fn, strategy) - eval_loss_metric = tf.keras.metrics.Mean( - 'training_loss', dtype=tf.float32) + eval_loss_metric = tf.keras.metrics.Mean('training_loss', dtype=tf.float32) with distribution_utils.get_strategy_scope(strategy): # To correctly place the model weights on accelerators, @@ -270,8 +273,7 @@ def run_customized_training_loop( checkpoint.restore(init_checkpoint).assert_existing_objects_matched() logging.info('Loading from checkpoint file completed') - train_loss_metric = tf.keras.metrics.Mean( - 'training_loss', dtype=tf.float32) + train_loss_metric = tf.keras.metrics.Mean('training_loss', dtype=tf.float32) eval_metrics = [metric_fn()] if metric_fn else [] # If evaluation is required, make a copy of metric as it will be used by # both train and evaluation. @@ -440,18 +442,19 @@ def run_customized_training_loop( latest_checkpoint_file = tf.train.latest_checkpoint(model_dir) if latest_checkpoint_file: - logging.info( - 'Checkpoint file %s found and restoring from ' - 'checkpoint', latest_checkpoint_file) + logging.info('Checkpoint file %s found and restoring from ' + 'checkpoint', latest_checkpoint_file) checkpoint.restore(latest_checkpoint_file) logging.info('Loading from checkpoint file completed') current_step = optimizer.iterations.numpy() checkpoint_name = 'ctl_step_{step}.ckpt' + logs = {} while current_step < total_training_steps: if current_step % steps_per_epoch == 0: - callback_list.on_epoch_begin(int(current_step / steps_per_epoch) + 1) + callback_list.on_epoch_begin( + int(current_step / steps_per_epoch) + 1) # Training loss/metric are taking average over steps inside micro # training loop. 
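To make the new `num_eval_per_epoch` plumbing concrete, here is an illustrative walk-through of the arithmetic above, using the same numbers as the updated `model_training_utils_test.py` case later in this patch (`steps_per_epoch=20`, `num_eval_per_epoch=4`, `steps_per_loop=10`, `epochs=2`):

```
# Illustrative arithmetic only; these values mirror the updated unit test.
steps_per_epoch = 20
num_eval_per_epoch = 4
requested_steps_per_loop = 10

steps_between_evals = int(steps_per_epoch / num_eval_per_epoch)  # 5

# A steps_per_loop larger than the evaluation interval is clamped down to
# the interval, so the host loop runs 5 steps at a time, evaluation runs
# every 5 steps, and on_batch_end fires at step indices 4, 9, ..., 39 over
# the 40 total training steps.
steps_per_loop = min(requested_steps_per_loop, steps_between_evals)  # 5
```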
We reset the their values before each round. @@ -461,7 +464,7 @@ def run_customized_training_loop( callback_list.on_batch_begin(current_step) # Runs several steps in the host while loop. - steps = steps_to_run(current_step, steps_per_epoch, steps_per_loop) + steps = steps_to_run(current_step, steps_between_evals, steps_per_loop) if tf.config.list_physical_devices('GPU'): # TODO(zongweiz): merge with train_steps once tf.while_loop @@ -470,11 +473,9 @@ def run_customized_training_loop( train_single_step(train_iterator) else: # Converts steps to a Tensor to avoid tf.function retracing. - train_steps(train_iterator, - tf.convert_to_tensor(steps, dtype=tf.int32)) + train_steps(train_iterator, tf.convert_to_tensor(steps, dtype=tf.int32)) train_loss = _float_metric_value(train_loss_metric) current_step += steps - callback_list.on_batch_end(current_step - 1, {'loss': train_loss}) # Updates training logging. training_status = 'Train Step: %d/%d / loss = %s' % ( @@ -492,8 +493,7 @@ def run_customized_training_loop( 'learning_rate', optimizer.learning_rate(current_step), step=current_step) - tf.summary.scalar( - train_loss_metric.name, train_loss, step=current_step) + tf.summary.scalar(train_loss_metric.name, train_loss, step=current_step) for metric in train_metrics + model.metrics: metric_value = _float_metric_value(metric) training_status += ' %s = %f' % (metric.name, metric_value) @@ -501,7 +501,11 @@ def run_customized_training_loop( summary_writer.flush() logging.info(training_status) - if current_step % steps_per_epoch == 0: + # If no need for evaluation, we only call on_batch_end with train_loss, + # this is to ensure we get granular global_step/sec on Tensorboard. + if current_step % steps_between_evals: + callback_list.on_batch_end(current_step - 1, {'loss': train_loss}) + else: # Save a submodel with the step in the file name after each epoch. if sub_model_export_name: _save_checkpoint( @@ -514,7 +518,6 @@ def run_customized_training_loop( if current_step < total_training_steps: _save_checkpoint(strategy, checkpoint, model_dir, checkpoint_name.format(step=current_step)) - logs = None if eval_input_fn: logging.info('Running evaluation after step: %s.', current_step) logs = _run_evaluation(current_step, @@ -523,8 +526,15 @@ def run_customized_training_loop( eval_loss_metric.reset_states() for metric in eval_metrics + model.metrics: metric.reset_states() + # We add train_loss here rather than call on_batch_end twice to make + # sure that no duplicated values are generated. + logs['loss'] = train_loss + callback_list.on_batch_end(current_step - 1, logs) - callback_list.on_epoch_end(int(current_step / steps_per_epoch), logs) + # Calls on_epoch_end after each real epoch ends to prevent mis-calculation + # of training steps. 
+ if current_step % steps_per_epoch == 0: + callback_list.on_epoch_end(int(current_step / steps_per_epoch), logs) if sub_model_export_name: _save_checkpoint(strategy, sub_model_checkpoint, model_dir, @@ -532,14 +542,11 @@ def run_customized_training_loop( _save_checkpoint(strategy, checkpoint, model_dir, checkpoint_name.format(step=current_step)) - logs = None if eval_input_fn: logging.info('Running final evaluation after training is complete.') logs = _run_evaluation(current_step, _get_input_iterator(eval_input_fn, strategy)) - callback_list.on_epoch_end(int(current_step / steps_per_epoch), logs) - training_summary = { 'total_training_steps': total_training_steps, 'train_loss': _float_metric_value(train_loss_metric), diff --git a/official/nlp/bert/model_training_utils_test.py b/official/nlp/bert/model_training_utils_test.py index 9a805ca59..4c85a6c9b 100644 --- a/official/nlp/bert/model_training_utils_test.py +++ b/official/nlp/bert/model_training_utils_test.py @@ -258,6 +258,7 @@ class ModelTrainingUtilsTest(tf.test.TestCase, parameterized.TestCase): loss_fn=tf.keras.losses.categorical_crossentropy, model_dir=model_dir, steps_per_epoch=20, + num_eval_per_epoch=4, steps_per_loop=10, epochs=2, train_input_fn=input_fn, @@ -269,14 +270,15 @@ class ModelTrainingUtilsTest(tf.test.TestCase, parameterized.TestCase): run_eagerly=False) self.assertEqual(callback.epoch_begin, [(1, {}), (2, {})]) epoch_ends, epoch_end_infos = zip(*callback.epoch_end) - self.assertEqual(list(epoch_ends), [1, 2]) + self.assertEqual(list(epoch_ends), [1, 2, 2]) for info in epoch_end_infos: self.assertIn('accuracy', info) - self.assertEqual(callback.batch_begin, - [(0, {}), (10, {}), (20, {}), (30, {})]) + self.assertEqual(callback.batch_begin, [(0, {}), (5, {}), (10, {}), + (15, {}), (20, {}), (25, {}), + (30, {}), (35, {})]) batch_ends, batch_end_infos = zip(*callback.batch_end) - self.assertEqual(list(batch_ends), [9, 19, 29, 39]) + self.assertEqual(list(batch_ends), [4, 9, 14, 19, 24, 29, 34, 39]) for info in batch_end_infos: self.assertIn('loss', info) -- GitLab From 43587c64f9e25467c17394504a327d90e05b865c Mon Sep 17 00:00:00 2001 From: Hongkun Yu Date: Tue, 16 Jun 2020 16:54:44 -0700 Subject: [PATCH 09/79] Internal change PiperOrigin-RevId: 316784919 --- official/nlp/tasks/sentence_prediction.py | 30 +++++++++---------- .../nlp/tasks/sentence_prediction_test.py | 19 +++++++++++- 2 files changed, 32 insertions(+), 17 deletions(-) diff --git a/official/nlp/tasks/sentence_prediction.py b/official/nlp/tasks/sentence_prediction.py index a7d9193b1..beebbdbad 100644 --- a/official/nlp/tasks/sentence_prediction.py +++ b/official/nlp/tasks/sentence_prediction.py @@ -29,9 +29,9 @@ from official.nlp.modeling import losses as loss_lib @dataclasses.dataclass class SentencePredictionConfig(cfg.TaskConfig): """The model config.""" - # At most one of `pretrain_checkpoint_dir` and `hub_module_url` can + # At most one of `init_checkpoint` and `hub_module_url` can # be specified. 
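The assertions in the updated `model_training_utils_test.py` hunk above rely on a callback that records every hook invocation. The actual helper lives elsewhere in the test file and is not shown in this patch; the sketch below (names assumed) illustrates the kind of recording callback those `epoch_begin`/`batch_end` attributes come from.

```
import tensorflow as tf

class RecordingCallback(tf.keras.callbacks.Callback):
  """Hypothetical stand-in for the test's recording callback."""

  def __init__(self):
    super(RecordingCallback, self).__init__()
    self.batch_begin, self.batch_end = [], []
    self.epoch_begin, self.epoch_end = [], []

  def on_batch_begin(self, batch, logs=None):
    self.batch_begin.append((batch, logs or {}))

  def on_batch_end(self, batch, logs=None):
    self.batch_end.append((batch, logs or {}))

  def on_epoch_begin(self, epoch, logs=None):
    self.epoch_begin.append((epoch, logs or {}))

  def on_epoch_end(self, epoch, logs=None):
    self.epoch_end.append((epoch, logs or {}))
```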
- pretrain_checkpoint_dir: str = '' + init_checkpoint: str = '' hub_module_url: str = '' network: bert.BertPretrainerConfig = bert.BertPretrainerConfig( num_masked_tokens=0, @@ -52,7 +52,7 @@ class SentencePredictionTask(base_task.Task): def __init__(self, params=cfg.TaskConfig): super(SentencePredictionTask, self).__init__(params) - if params.hub_module_url and params.pretrain_checkpoint_dir: + if params.hub_module_url and params.init_checkpoint: raise ValueError('At most one of `hub_module_url` and ' '`pretrain_checkpoint_dir` can be specified.') if params.hub_module_url: @@ -82,8 +82,8 @@ class SentencePredictionTask(base_task.Task): def build_losses(self, labels, model_outputs, aux_losses=None) -> tf.Tensor: loss = loss_lib.weighted_sparse_categorical_crossentropy_loss( labels=labels, - predictions=tf.nn.log_softmax(model_outputs['sentence_prediction'], - axis=-1)) + predictions=tf.nn.log_softmax( + model_outputs['sentence_prediction'], axis=-1)) if aux_losses: loss += tf.add_n(aux_losses) @@ -92,6 +92,7 @@ class SentencePredictionTask(base_task.Task): def build_inputs(self, params, input_context=None): """Returns tf.data.Dataset for sentence_prediction task.""" if params.input_path == 'dummy': + def dummy_data(_): dummy_ids = tf.zeros((1, params.seq_length), dtype=tf.int32) x = dict( @@ -112,9 +113,7 @@ class SentencePredictionTask(base_task.Task): def build_metrics(self, training=None): del training - metrics = [ - tf.keras.metrics.SparseCategoricalAccuracy(name='cls_accuracy') - ] + metrics = [tf.keras.metrics.SparseCategoricalAccuracy(name='cls_accuracy')] return metrics def process_metrics(self, metrics, labels, model_outputs): @@ -126,8 +125,10 @@ class SentencePredictionTask(base_task.Task): def initialize(self, model): """Load a pretrained checkpoint (if exists) and then train from iter 0.""" - pretrain_ckpt_dir = self.task_config.pretrain_checkpoint_dir - if not pretrain_ckpt_dir: + ckpt_dir_or_file = self.task_config.init_checkpoint + if tf.io.gfile.isdir(ckpt_dir_or_file): + ckpt_dir_or_file = tf.train.latest_checkpoint(ckpt_dir_or_file) + if not ckpt_dir_or_file: return pretrain2finetune_mapping = { @@ -137,10 +138,7 @@ class SentencePredictionTask(base_task.Task): model.checkpoint_items['sentence_prediction.pooler_dense'], } ckpt = tf.train.Checkpoint(**pretrain2finetune_mapping) - latest_pretrain_ckpt = tf.train.latest_checkpoint(pretrain_ckpt_dir) - if latest_pretrain_ckpt is None: - raise FileNotFoundError( - 'Cannot find pretrain checkpoint under {}'.format(pretrain_ckpt_dir)) - status = ckpt.restore(latest_pretrain_ckpt) + status = ckpt.restore(ckpt_dir_or_file) status.expect_partial().assert_existing_objects_matched() - logging.info('finished loading pretrained checkpoint.') + logging.info('finished loading pretrained checkpoint from %s', + ckpt_dir_or_file) diff --git a/official/nlp/tasks/sentence_prediction_test.py b/official/nlp/tasks/sentence_prediction_test.py index c52619490..e68db0a1a 100644 --- a/official/nlp/tasks/sentence_prediction_test.py +++ b/official/nlp/tasks/sentence_prediction_test.py @@ -43,8 +43,10 @@ class SentencePredictionTaskTest(tf.test.TestCase): def test_task(self): config = sentence_prediction.SentencePredictionConfig( + init_checkpoint=self.get_temp_dir(), network=bert.BertPretrainerConfig( - encoders.TransformerEncoderConfig(vocab_size=30522, num_layers=1), + encoder=encoders.TransformerEncoderConfig( + vocab_size=30522, num_layers=1), num_masked_tokens=0, cls_heads=[ bert.ClsHeadConfig( @@ -62,6 +64,21 @@ class 
SentencePredictionTaskTest(tf.test.TestCase): task.train_step(next(iterator), model, optimizer, metrics=metrics) task.validation_step(next(iterator), model, metrics=metrics) + # Saves a checkpoint. + pretrain_cfg = bert.BertPretrainerConfig( + encoder=encoders.TransformerEncoderConfig( + vocab_size=30522, num_layers=1), + num_masked_tokens=20, + cls_heads=[ + bert.ClsHeadConfig( + inner_dim=10, num_classes=3, name="next_sentence") + ]) + pretrain_model = bert.instantiate_from_cfg(pretrain_cfg) + ckpt = tf.train.Checkpoint( + model=pretrain_model, **pretrain_model.checkpoint_items) + ckpt.save(config.init_checkpoint) + task.initialize(model) + def _export_bert_tfhub(self): bert_config = configs.BertConfig( vocab_size=30522, -- GitLab From 0ab249df5d3db9059507061953d9336a076b6ff2 Mon Sep 17 00:00:00 2001 From: Mark Daoust Date: Tue, 16 Jun 2020 16:57:17 -0700 Subject: [PATCH 10/79] Add explaination and examples to `fine_tune_bert.ipynb` PiperOrigin-RevId: 316785333 --- official/colab/fine_tuning_bert.ipynb | 1740 +++++++++++++++++++++---- 1 file changed, 1509 insertions(+), 231 deletions(-) diff --git a/official/colab/fine_tuning_bert.ipynb b/official/colab/fine_tuning_bert.ipynb index 4490c91ae..443674b6b 100644 --- a/official/colab/fine_tuning_bert.ipynb +++ b/official/colab/fine_tuning_bert.ipynb @@ -4,64 +4,79 @@ "cell_type": "markdown", "metadata": { "colab_type": "text", - "id": "YN2ACivEPxgD" + "id": "vXLA5InzXydn" }, "source": [ - "## How-to Guide: Using a PIP package for fine-tuning a BERT model\n", - "\n", - "Authors: [Chen Chen](https://github.com/chenGitHuber), [Claire Yao](https://github.com/claireyao-fen)\n", - "\n", - "In this example, we will work through fine-tuning a BERT model using the tensorflow-models PIP package." + "##### Copyright 2019 The TensorFlow Authors." + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "cellView": "form", + "colab": {}, + "colab_type": "code", + "id": "RuRlpLL-X0R_" + }, + "outputs": [], + "source": [ + "#@title Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# https://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." ] }, { "cell_type": "markdown", "metadata": { "colab_type": "text", - "id": "T7BBEc1-RNCQ" + "id": "1mLJmVotXs64" }, "source": [ - "## License\n", - "\n", - "Copyright 2020 The TensorFlow Authors. All Rights Reserved.\n", - "\n", - "Licensed under the Apache License, Version 2.0 (the \"License\");\n", - "you may not use this file except in compliance with the License.\n", - "You may obtain a copy of the License at\n", - "\n", - " http://www.apache.org/licenses/LICENSE-2.0\n", - "\n", - "Unless required by applicable law or agreed to in writing, software\n", - "distributed under the License is distributed on an \"AS IS\" BASIS,\n", - "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", - "See the License for the specific language governing permissions and\n", - "limitations under the License." 
+ "# Fine-tuning a BERT model" ] }, { "cell_type": "markdown", "metadata": { "colab_type": "text", - "id": "Pf6xzoKjywY_" + "id": "hYEwGTeCXnnX" }, "source": [ - "## Learning objectives\n", - "\n", - "In this Colab notebook, you will learn how to fine-tune a BERT model using the TensorFlow Model Garden PIP package." + "\u003ctable class=\"tfo-notebook-buttons\" align=\"left\"\u003e\n", + " \u003ctd\u003e\n", + " \u003ca target=\"_blank\" href=\"https://www.tensorflow.org/official_models/tutorials/fine_tune_bert.ipynb\"\u003e\u003cimg src=\"https://www.tensorflow.org/images/tf_logo_32px.png\" /\u003eView on TensorFlow.org\u003c/a\u003e\n", + " \u003c/td\u003e\n", + " \u003ctd\u003e\n", + " \u003ca target=\"_blank\" href=\"https://colab.research.google.com/github/tensorflow/models/blob/master/official/colab/fine_tuning_bert.ipynb\"\u003e\u003cimg src=\"https://www.tensorflow.org/images/colab_logo_32px.png\" /\u003eRun in Google Colab\u003c/a\u003e\n", + " \u003c/td\u003e\n", + " \u003ctd\u003e\n", + " \u003ca target=\"_blank\" href=\"https://github.com/tensorflow/models/blob/master/official/colab/fine_tuning_bert.ipynb\"\u003e\u003cimg src=\"https://www.tensorflow.org/images/GitHub-Mark-32px.png\" /\u003eView source on GitHub\u003c/a\u003e\n", + " \u003c/td\u003e\n", + " \u003ctd\u003e\n", + " \u003ca href=\"https://storage.googleapis.com/tensorflow_docs/models/official/colab/fine_tuning_bert.ipynb\"\u003e\u003cimg src=\"https://www.tensorflow.org/images/download_logo_32px.png\" /\u003eDownload notebook\u003c/a\u003e\n", + " \u003c/td\u003e\n", + "\u003c/table\u003e" ] }, { "cell_type": "markdown", "metadata": { "colab_type": "text", - "id": "YHkmV89jRWkS" + "id": "YN2ACivEPxgD" }, "source": [ - "## Enable the GPU acceleration\n", - "Please enable GPU for better performance.\n", - "* Navigate to Edit.\n", - "* Find Notebook settings.\n", - "* Select GPU from the \"Hardware Accelerator\" drop-down list, save it." + "In this example, we will work through fine-tuning a BERT model using the tensorflow-models PIP package.\n", + "\n", + "The pretrained BERT model this tutorial is based on is also available on [TensorFlow Hub](https://tensorflow.org/hub), to see how to use it refer to the [Hub Appendix](#hub_bert)" ] }, { @@ -71,7 +86,7 @@ "id": "s2d9S2CSSO1z" }, "source": [ - "##Install and import" + "## Setup" ] }, { @@ -83,7 +98,7 @@ "source": [ "### Install the TensorFlow Model Garden pip package\n", "\n", - "* tf-models-nightly is the nightly Model Garden package created daily automatically.\n", + "* `tf-models-nightly` is the nightly Model Garden package created daily automatically.\n", "* pip will install all models and dependencies automatically." 
] }, @@ -97,7 +112,8 @@ }, "outputs": [], "source": [ - "!pip install tf-models-nightly" + "!pip install -q tf-nightly\n", + "!pip install -q tf-models-nightly" ] }, { @@ -107,7 +123,7 @@ "id": "U-7qPCjWUAyy" }, "source": [ - "### Import Tensorflow and other libraries" + "### Imports" ] }, { @@ -123,67 +139,176 @@ "import os\n", "\n", "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "\n", "import tensorflow as tf\n", "\n", + "import tensorflow_hub as hub\n", + "import tensorflow_datasets as tfds\n", + "tfds.disable_progress_bar()\n", + "\n", "from official.modeling import tf_utils\n", - "from official.nlp import optimization\n", - "from official.nlp.bert import configs as bert_configs\n", - "from official.nlp.bert import tokenization\n", - "from official.nlp.data import classifier_data_lib\n", - "from official.nlp.modeling import losses\n", - "from official.nlp.modeling import models\n", - "from official.nlp.modeling import networks" + "from official import nlp\n", + "from official.nlp import bert\n", + "\n", + "# Load the required submodules\n", + "import official.nlp.optimization\n", + "import official.nlp.bert.bert_models\n", + "import official.nlp.bert.configs\n", + "import official.nlp.bert.run_classifier\n", + "import official.nlp.bert.tokenization\n", + "import official.nlp.data.classifier_data_lib\n", + "import official.nlp.modeling.losses\n", + "import official.nlp.modeling.models\n", + "import official.nlp.modeling.networks" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "mbanlzTvJBsz" + }, + "source": [ + "### Resources" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "PpW0x8TpR8DT" + }, + "source": [ + "This directory contains the configuration, vocabulary, and a pre-trained checkpoint used in this tutorial:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "vzRHOLciR8eq" + }, + "outputs": [], + "source": [ + "gs_folder_bert = \"gs://cloud-tpu-checkpoints/bert/keras_bert/uncased_L-12_H-768_A-12\"\n", + "tf.io.gfile.listdir(gs_folder_bert)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "9uFskufsR2LT" + }, + "source": [ + "You can get a pre-trained BERT encoder from TensorFlow Hub here:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "e0dAkUttJAzj" + }, + "outputs": [], + "source": [ + "hub_url_bert = \"https://tfhub.dev/tensorflow/bert_en_uncased_L-12_H-768_A-12/2\"" ] }, { "cell_type": "markdown", "metadata": { "colab_type": "text", - "id": "C2drjD7OVCmh" + "id": "Qv6abtRvH4xO" }, "source": [ - "## Preprocess the raw data and output tf.record files" + "## The data\n", + "For this example we used the [GLUE MRPC dataset from TFDS](https://www.tensorflow.org/datasets/catalog/glue#gluemrpc).\n", + "\n", + "This dataset is not set up so that it can be directly fed into the BERT model, so this section also handles the necessary preprocessing." 
] }, { "cell_type": "markdown", "metadata": { "colab_type": "text", - "id": "qfjcKj5FYQOp" + "id": "28DvUhC1YUiB" }, "source": [ - "### Introduction of dataset\n", + "### Get the dataset from TensorFlow Datasets\n", "\n", "The Microsoft Research Paraphrase Corpus (Dolan \u0026 Brockett, 2005) is a corpus of sentence pairs automatically extracted from online news sources, with human annotations for whether the sentences in the pair are semantically equivalent.\n", "\n", "* Number of labels: 2.\n", "* Size of training dataset: 3668.\n", "* Size of evaluation dataset: 408.\n", - "* Maximum sequence length of training and evaluation dataset: 128.\n", - "* Please refer here for details: https://www.tensorflow.org/datasets/catalog/glue#gluemrpc" + "* Maximum sequence length of training and evaluation dataset: 128.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "Ijikx5OsH9AT" + }, + "outputs": [], + "source": [ + "glue, info = tfds.load('glue/mrpc', with_info=True,\n", + " # It's small, load the whole dataset\n", + " batch_size=-1)" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "xf9zz4vLYXjr" + }, + "outputs": [], + "source": [ + "list(glue.keys())" ] }, { "cell_type": "markdown", "metadata": { "colab_type": "text", - "id": "28DvUhC1YUiB" + "id": "ZgBg2r2nYT-K" }, "source": [ - "### Get dataset from TensorFlow Datasets (TFDS)\n", - "\n", - "For example, we used the GLUE MRPC dataset from TFDS: https://www.tensorflow.org/datasets/catalog/glue#gluemrpc." + "The `info` object describes the dataset and it's features:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "IQrHxv7W7jH5" + }, + "outputs": [], + "source": [ + "info.features" ] }, { "cell_type": "markdown", "metadata": { "colab_type": "text", - "id": "4PhRLWh9jaXp" + "id": "vhsVWYNxazz5" }, "source": [ - "### Preprocess the data and write to TensorFlow record file\n", - "\n" + "The two classes are:" ] }, { @@ -192,43 +317,21 @@ "metadata": { "colab": {}, "colab_type": "code", - "id": "FhcMdzsrjWzG" + "id": "n0gfc_VTayfQ" }, "outputs": [], "source": [ - "gs_folder_bert = \"gs://cloud-tpu-checkpoints/bert/keras_bert/uncased_L-12_H-768_A-12\"\n", - "\n", - "# Set up tokenizer to generate Tensorflow dataset\n", - "tokenizer = tokenization.FullTokenizer(\n", - " vocab_file=os.path.join(gs_folder_bert, \"vocab.txt\"), do_lower_case=True)\n", - "\n", - "# Set up processor to generate Tensorflow dataset\n", - "processor = classifier_data_lib.TfdsProcessor(\n", - " tfds_params=\"dataset=glue/mrpc,text_key=sentence1,text_b_key=sentence2\",\n", - " process_text_fn=tokenization.convert_to_unicode)\n", - "\n", - "# Set up output of training and evaluation Tensorflow dataset\n", - "train_data_output_path=\"./mrpc_train.tf_record\"\n", - "eval_data_output_path=\"./mrpc_eval.tf_record\"\n", - "\n", - "# Generate and save training data into a tf record file\n", - "input_meta_data = classifier_data_lib.generate_tf_record_from_data_file(\n", - " processor=processor,\n", - " data_dir=None, # It is `None` because data is from tfds, not local dir.\n", - " tokenizer=tokenizer,\n", - " train_data_output_path=train_data_output_path,\n", - " eval_data_output_path=eval_data_output_path,\n", - " max_seq_length=128)" + "info.features['label'].names" ] }, { "cell_type": "markdown", "metadata": { "colab_type": "text", - "id": "dbJ76vSJj77j" + 
"id": "38zJcap6xkbC" }, "source": [ - "### Create tf.dataset for training and evaluation\n" + "Here is one example from the training set:" ] }, { @@ -237,82 +340,38 @@ "metadata": { "colab": {}, "colab_type": "code", - "id": "gCvaLLAxPuMc" + "id": "xON_i6SkwApW" }, "outputs": [], "source": [ - "def create_classifier_dataset(file_path, seq_length, batch_size, is_training):\n", - " \"\"\"Creates input dataset from (tf)records files for train/eval.\"\"\"\n", - " dataset = tf.data.TFRecordDataset(file_path)\n", - " if is_training:\n", - " dataset = dataset.shuffle(100)\n", - " dataset = dataset.repeat()\n", - "\n", - " def decode_record(record):\n", - " name_to_features = {\n", - " 'input_ids': tf.io.FixedLenFeature([seq_length], tf.int64),\n", - " 'input_mask': tf.io.FixedLenFeature([seq_length], tf.int64),\n", - " 'segment_ids': tf.io.FixedLenFeature([seq_length], tf.int64),\n", - " 'label_ids': tf.io.FixedLenFeature([], tf.int64),\n", - " }\n", - " return tf.io.parse_single_example(record, name_to_features)\n", - "\n", - " def _select_data_from_record(record):\n", - " x = {\n", - " 'input_word_ids': record['input_ids'],\n", - " 'input_mask': record['input_mask'],\n", - " 'input_type_ids': record['segment_ids']\n", - " }\n", - " y = record['label_ids']\n", - " return (x, y)\n", - "\n", - " dataset = dataset.map(decode_record,\n", - " num_parallel_calls=tf.data.experimental.AUTOTUNE)\n", - " dataset = dataset.map(\n", - " _select_data_from_record,\n", - " num_parallel_calls=tf.data.experimental.AUTOTUNE)\n", - " dataset = dataset.batch(batch_size, drop_remainder=is_training)\n", - " dataset = dataset.prefetch(tf.data.experimental.AUTOTUNE)\n", - " return dataset\n", - "\n", - "# Set up batch sizes\n", - "batch_size = 32\n", - "eval_batch_size = 32\n", - "\n", - "# Return Tensorflow dataset\n", - "training_dataset = create_classifier_dataset(\n", - " train_data_output_path,\n", - " input_meta_data['max_seq_length'],\n", - " batch_size,\n", - " is_training=True)\n", + "glue_train = glue['train']\n", "\n", - "evaluation_dataset = create_classifier_dataset(\n", - " eval_data_output_path,\n", - " input_meta_data['max_seq_length'],\n", - " eval_batch_size,\n", - " is_training=False)\n" + "for key, value in glue_train.items():\n", + " print(f\"{key:9s}: {value[0].numpy()}\")" ] }, { "cell_type": "markdown", "metadata": { "colab_type": "text", - "id": "Efrj3Cn1kLAp" + "id": "9fbTyfJpNr7x" }, "source": [ - "## Create, compile and train the model" + "### The BERT tokenizer" ] }, { "cell_type": "markdown", "metadata": { "colab_type": "text", - "id": "96ldxDSwkVkj" + "id": "wqeN54S61ZKQ" }, "source": [ - "### Construct a Bert Model\n", + "To fine tune a pre-trained model you need to be sure that you're using exactly the same tokenization, vocabulary, and index mapping as you used during training.\n", "\n", - "Here, a Bert Model is constructed from the json file with parameters. The bert_config defines the core Bert Model, which is a Keras model to predict the outputs of *num_classes* from the inputs with maximum sequence length *max_seq_length*. " + "The BERT tokenizer used in this tutorial is written in pure Python (It's not built out of TensorFlow ops). 
So you can't just plug it into your model as a `keras.layer` like you can with `preprocessing.TextVectorization`.\n", + "\n", + "The following code rebuilds the tokenizer that was used by the base model:" ] }, { @@ -321,44 +380,26 @@ "metadata": { "colab": {}, "colab_type": "code", - "id": "Qgajw8WPYzJZ" + "id": "idxyhmrCQcw5" }, "outputs": [], "source": [ - "bert_config_file = os.path.join(gs_folder_bert, \"bert_config.json\")\n", - "bert_config = bert_configs.BertConfig.from_json_file(bert_config_file)\n", - "\n", - "bert_encoder = networks.TransformerEncoder(vocab_size=bert_config.vocab_size,\n", - " hidden_size=bert_config.hidden_size,\n", - " num_layers=bert_config.num_hidden_layers,\n", - " num_attention_heads=bert_config.num_attention_heads,\n", - " intermediate_size=bert_config.intermediate_size,\n", - " activation=tf_utils.get_activation(bert_config.hidden_act),\n", - " dropout_rate=bert_config.hidden_dropout_prob,\n", - " attention_dropout_rate=bert_config.attention_probs_dropout_prob,\n", - " sequence_length=input_meta_data['max_seq_length'],\n", - " max_sequence_length=bert_config.max_position_embeddings,\n", - " type_vocab_size=bert_config.type_vocab_size,\n", - " embedding_width=bert_config.embedding_size,\n", - " initializer=tf.keras.initializers.TruncatedNormal(\n", - " stddev=bert_config.initializer_range))\n", - "\n", - "classifier_model = models.BertClassifier(\n", - " bert_encoder,\n", - " num_classes=input_meta_data['num_labels'],\n", - " dropout_rate=bert_config.hidden_dropout_prob,\n", - " initializer=tf.keras.initializers.TruncatedNormal(\n", - " stddev=bert_config.initializer_range))" + "# Set up tokenizer to generate Tensorflow dataset\n", + "tokenizer = bert.tokenization.FullTokenizer(\n", + " vocab_file=os.path.join(gs_folder_bert, \"vocab.txt\"),\n", + " do_lower_case=True)\n", + "\n", + "print(\"Vocab size:\", len(tokenizer.vocab))" ] }, { "cell_type": "markdown", "metadata": { "colab_type": "text", - "id": "pkSq1wbNXBaa" + "id": "zYHDSquU2lDU" }, "source": [ - "### Initialize the encoder from a pretrained model" + "Tokenize a sentence:" ] }, { @@ -367,26 +408,40 @@ "metadata": { "colab": {}, "colab_type": "code", - "id": "X6N9NEqfXJCx" + "id": "L_OfOYPg853R" }, "outputs": [], "source": [ - "checkpoint = tf.train.Checkpoint(model=bert_encoder)\n", - "checkpoint.restore(\n", - " os.path.join(gs_folder_bert, 'bert_model.ckpt')).assert_consumed()" + "tokens = tokenizer.tokenize(\"Hello TensorFlow!\")\n", + "print(tokens)\n", + "ids = tokenizer.convert_tokens_to_ids(tokens)\n", + "print(ids)" ] }, { "cell_type": "markdown", "metadata": { "colab_type": "text", - "id": "115caFLMk-_l" + "id": "kkAXLtuyWWDI" }, "source": [ - "### Set up an optimizer for the model\n", + "### Preprocess the data\n", "\n", - "BERT model adopts the Adam optimizer with weight decay.\n", - "It also employs a learning rate schedule that firstly warms up from 0 and then decays to 0." + "The section manually preprocessed the dataset into the format expected by the model.\n", + "\n", + "This dataset is small, so preprocessing can be done quickly and easily in memory. For larger datasets the `tf_models` library includes some tools for preprocessing and re-serializing a dataset. See [Appendix: Re-encoding a large dataset](#re_encoding_tools) for details." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "62UTWLQd9-LB" + }, + "source": [ + "#### Encode the sentences\n", + "\n", + "The model expects its two inputs sentences to be concatenated together. 
This input is expected to start with a `[CLS]` \"This is a classification problem\" token, and each sentence should end with a `[SEP]` \"Separator\" token:" ] }, { @@ -395,45 +450,21 @@ "metadata": { "colab": {}, "colab_type": "code", - "id": "2Hf2rpRXk89N" + "id": "bdL-dRNRBRJT" }, "outputs": [], "source": [ - "# Set up epochs and steps\n", - "epochs = 3\n", - "train_data_size = input_meta_data['train_data_size']\n", - "steps_per_epoch = int(train_data_size / batch_size)\n", - "num_train_steps = steps_per_epoch * epochs\n", - "warmup_steps = int(epochs * train_data_size * 0.1 / batch_size)\n", - "\n", - "# Create learning rate schedule that firstly warms up from 0 and they decy to 0.\n", - "lr_schedule = tf.keras.optimizers.schedules.PolynomialDecay(\n", - " initial_learning_rate=2e-5,\n", - " decay_steps=num_train_steps,\n", - " end_learning_rate=0)\n", - "lr_schedule = optimization.WarmUp(\n", - " initial_learning_rate=2e-5,\n", - " decay_schedule_fn=lr_schedule,\n", - " warmup_steps=warmup_steps)\n", - "optimizer = optimization.AdamWeightDecay(\n", - " learning_rate=lr_schedule,\n", - " weight_decay_rate=0.01,\n", - " beta_1=0.9,\n", - " beta_2=0.999,\n", - " epsilon=1e-6,\n", - " exclude_from_weight_decay=['LayerNorm', 'layer_norm', 'bias'])" + "tokenizer.convert_tokens_to_ids(['[CLS]', '[SEP]'])" ] }, { "cell_type": "markdown", "metadata": { "colab_type": "text", - "id": "OTNcA0O0nSq9" + "id": "UrPktnqpwqie" }, "source": [ - "### Define metric_fn and loss_fn\n", - "\n", - "The metric is accuracy and we use sparse categorical cross-entropy as loss." + "Start by encoding all the sentences while appending a `[SEP]` token, and packing them into ragged-tensors:" ] }, { @@ -442,27 +473,43 @@ "metadata": { "colab": {}, "colab_type": "code", - "id": "ELHjRp87nVNH" + "id": "BR7BmtU498Bh" }, "outputs": [], "source": [ - "def metric_fn():\n", - " return tf.keras.metrics.SparseCategoricalAccuracy(\n", - " 'accuracy', dtype=tf.float32)\n", + "def encode_sentence(s):\n", + " tokens = list(tokenizer.tokenize(s.numpy()))\n", + " tokens.append('[SEP]')\n", + " return tokenizer.convert_tokens_to_ids(tokens)\n", "\n", - "def classification_loss_fn(labels, logits):\n", - " return losses.weighted_sparse_categorical_crossentropy_loss(\n", - " labels=labels, predictions=tf.nn.log_softmax(logits, axis=-1))\n" + "sentence1 = tf.ragged.constant([\n", + " encode_sentence(s) for s in glue_train[\"sentence1\"]])\n", + "sentence2 = tf.ragged.constant([\n", + " encode_sentence(s) for s in glue_train[\"sentence2\"]])" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "has42aUdfky-" + }, + "outputs": [], + "source": [ + "print(\"Sentence1 shape:\", sentence1.shape.as_list())\n", + "print(\"Sentence2 shape:\", sentence2.shape.as_list())" ] }, { "cell_type": "markdown", "metadata": { "colab_type": "text", - "id": "78FEUOOEkoP0" + "id": "MU9lTWy_xXbb" }, "source": [ - "### Compile and train the model" + "Now prepend a `[CLS]` token, and concatenate the ragged tensors to form a single `input_word_ids` tensor for each example. `RaggedTensor.to_tensor()` zero pads to the longest sequence." 
] }, { @@ -471,29 +518,46 @@ "metadata": { "colab": {}, "colab_type": "code", - "id": "nzi8hjeTQTRs" + "id": "USD8uihw-g4J" }, "outputs": [], "source": [ - "classifier_model.compile(optimizer=optimizer,\n", - " loss=classification_loss_fn,\n", - " metrics=[metric_fn()])\n", - "classifier_model.fit(\n", - " x=training_dataset,\n", - " validation_data=evaluation_dataset,\n", - " steps_per_epoch=steps_per_epoch,\n", - " epochs=epochs,\n", - " validation_steps=int(input_meta_data['eval_data_size'] / eval_batch_size))" + "cls = [tokenizer.convert_tokens_to_ids(['[CLS]'])]*sentence1.shape[0]\n", + "input_word_ids = tf.concat([cls, sentence1, sentence2], axis=-1)\n", + "_ = plt.pcolormesh(input_word_ids.to_tensor())" ] }, { "cell_type": "markdown", "metadata": { "colab_type": "text", - "id": "fVo_AnT0l26j" + "id": "xmNv4l4k-dBZ" + }, + "source": [ + "#### Mask and input type" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "DIWjNIKq-ldh" + }, + "source": [ + "The model expects two additional inputs:\n", + "\n", + "* The input mask\n", + "* The input type" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "ulNZ4U96-8JZ" }, "source": [ - "### Save the model" + "The mask allows the model to cleanly differentiate between the content and the padding. The mask has the same shape as the `input_word_ids`, and contains a `1` anywhere the `input_word_ids` is not padding." ] }, { @@ -502,21 +566,23 @@ "metadata": { "colab": {}, "colab_type": "code", - "id": "Nl5x6nElZqkP" + "id": "EezOO9qj91kP" }, "outputs": [], "source": [ - "classifier_model.save('./saved_model', include_optimizer=False, save_format='tf')" + "input_mask = tf.ones_like(input_word_ids).to_tensor()\n", + "\n", + "plt.pcolormesh(input_mask)" ] }, { "cell_type": "markdown", "metadata": { "colab_type": "text", - "id": "nWsE6yeyfW00" + "id": "rxLenwAvCkBf" }, "source": [ - "## Use the trained model to predict\n" + "The \"input type\" also has the same shape, but inside the non-padded region, contains a `0` or a `1` indicating which sentence the token is a part of. " ] }, { @@ -525,13 +591,1223 @@ "metadata": { "colab": {}, "colab_type": "code", - "id": "vz7YJY2QYAjP" + "id": "2CetH_5C9P2m" }, "outputs": [], "source": [ - "eval_predictions = classifier_model.predict(evaluation_dataset)\n", - "for prediction in eval_predictions:\n", - " print(\"Predicted label id: %s\" % np.argmax(prediction))" + "type_cls = tf.zeros_like(cls)\n", + "type_s1 = tf.zeros_like(sentence1)\n", + "type_s2 = tf.ones_like(sentence2)\n", + "input_type_ids = tf.concat([type_cls, type_s1, type_s2], axis=-1).to_tensor()\n", + "\n", + "plt.pcolormesh(input_type_ids)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "P5UBnCn8Ii6s" + }, + "source": [ + "#### Put it all together\n", + "\n", + "Collect the above text parsing code into a single function, and apply it to each split of the `glue/mrpc` dataset." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "sDGiWYPLEd5a" + }, + "outputs": [], + "source": [ + "def encode_sentence(s, tokenizer):\n", + " tokens = list(tokenizer.tokenize(s))\n", + " tokens.append('[SEP]')\n", + " return tokenizer.convert_tokens_to_ids(tokens)\n", + "\n", + "def bert_encode(glue_dict, tokenizer):\n", + " num_examples = len(glue_dict[\"sentence1\"])\n", + " \n", + " sentence1 = tf.ragged.constant([\n", + " encode_sentence(s, tokenizer)\n", + " for s in np.array(glue_dict[\"sentence1\"])])\n", + " sentence2 = tf.ragged.constant([\n", + " encode_sentence(s, tokenizer)\n", + " for s in np.array(glue_dict[\"sentence2\"])])\n", + "\n", + " cls = [tokenizer.convert_tokens_to_ids(['[CLS]'])]*sentence1.shape[0]\n", + " input_word_ids = tf.concat([cls, sentence1, sentence2], axis=-1)\n", + "\n", + " input_mask = tf.ones_like(input_word_ids).to_tensor()\n", + "\n", + " type_cls = tf.zeros_like(cls)\n", + " type_s1 = tf.zeros_like(sentence1)\n", + " type_s2 = tf.ones_like(sentence2)\n", + " input_type_ids = tf.concat(\n", + " [type_cls, type_s1, type_s2], axis=-1).to_tensor()\n", + "\n", + " inputs = {\n", + " 'input_word_ids': input_word_ids.to_tensor(),\n", + " 'input_mask': input_mask,\n", + " 'input_type_ids': input_type_ids}\n", + "\n", + " return inputs" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "yuLKxf6zHxw-" + }, + "outputs": [], + "source": [ + "glue_train = bert_encode(glue['train'], tokenizer)\n", + "glue_train_labels = glue['train']['label']\n", + "\n", + "glue_validation = bert_encode(glue['validation'], tokenizer)\n", + "glue_validation_labels = glue['validation']['label']\n", + "\n", + "glue_test = bert_encode(glue['test'], tokenizer)\n", + "glue_test_labels = glue['test']['label']" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "7FC5aLVxKVKK" + }, + "source": [ + "Each subset of the data has been converted to a dictionary of features, and a set of labels. 
Each feature in the input dictionary has the same shape, and the number of labels should match:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "jyjTdGpFhO_1" + }, + "outputs": [], + "source": [ + "for key, value in glue_train.items():\n", + " print(f'{key:15s} shape: {value.shape}')\n", + "\n", + "print(f'glue_train_labels shape: {glue_train_labels.shape}')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "FSwymsbkbLDA" + }, + "source": [ + "## The model" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "Efrj3Cn1kLAp" + }, + "source": [ + "### Build the model\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "xxpOY5r2Ayq6" + }, + "source": [ + "The first step is to download the configuration for the pre-trained model.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "ujapVfZ_AKW7" + }, + "outputs": [], + "source": [ + "import json\n", + "\n", + "bert_config_file = os.path.join(gs_folder_bert, \"bert_config.json\")\n", + "config_dict = json.loads(tf.io.gfile.GFile(bert_config_file).read())\n", + "\n", + "bert_config = bert.configs.BertConfig.from_dict(config_dict)\n", + "\n", + "config_dict" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "96ldxDSwkVkj" + }, + "source": [ + "The `config` defines the core BERT Model, which is a Keras model to predict the outputs of `num_classes` from the inputs with maximum sequence length `max_seq_length`.\n", + "\n", + "This function returns both the encoder and the classifier." + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "cH682__U0FBv" + }, + "outputs": [], + "source": [ + "bert_classifier, bert_encoder = bert.bert_models.classifier_model(\n", + " bert_config, num_labels=2)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "XqKp3-5GIZlw" + }, + "source": [ + "The classifier has three inputs and one output:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "bAQblMIjwkvx" + }, + "outputs": [], + "source": [ + "tf.keras.utils.plot_model(bert_classifier, show_shapes=True, dpi=48)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "sFmVG4SKZAw8" + }, + "source": [ + "Run it on a test batch of data 10 examples from the training set. 
The output is the logits for the two classes:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "VTjgPbp4ZDKo" + }, + "outputs": [], + "source": [ + "glue_batch = {key: val[:10] for key, val in glue_train.items()}\n", + "\n", + "bert_classifier(\n", + " glue_batch, training=True\n", + ").numpy()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "Q0NTdwZsQK8n" + }, + "source": [ + "The `TransformerEncoder` in the center of the classifier above **is** the `bert_encoder`.\n", + "\n", + "Inspecting the encoder, we see its stack of `Transformer` layers connected to those same three inputs:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "8L__-erBwLIQ" + }, + "outputs": [], + "source": [ + "tf.keras.utils.plot_model(bert_encoder, show_shapes=True, dpi=48)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "mKAvkQc3heSy" + }, + "source": [ + "### Restore the encoder weights\n", + "\n", + "When built the encoder is randomly initialized. Restore the encoder's weights from the checkpoint:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "97Ll2Gichd_Y" + }, + "outputs": [], + "source": [ + "checkpoint = tf.train.Checkpoint(model=bert_encoder)\n", + "checkpoint.restore(\n", + " os.path.join(gs_folder_bert, 'bert_model.ckpt')).assert_consumed()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "2oHOql35k3Dd" + }, + "source": [ + "Note: The pretrained `TransformerEncoder` is also available on [TensorFlow Hub](https://tensorflow.org/hub). See the [Hub appendix](#hub_bert) for details. " + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "115caFLMk-_l" + }, + "source": [ + "### Set up the optimizer\n", + "\n", + "BERT adopts the Adam optimizer with weight decay (aka \"[AdamW](https://arxiv.org/abs/1711.05101)\").\n", + "It also employs a learning rate schedule that firstly warms up from 0 and then decays to 0." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "w8qXKRZuCwW4" + }, + "outputs": [], + "source": [ + "# Set up epochs and steps\n", + "epochs = 3\n", + "batch_size = 32\n", + "eval_batch_size = 32\n", + "\n", + "train_data_size = len(glue_train_labels)\n", + "steps_per_epoch = int(train_data_size / batch_size)\n", + "num_train_steps = steps_per_epoch * epochs\n", + "warmup_steps = int(epochs * train_data_size * 0.1 / batch_size)\n", + "\n", + "# creates an optimizer with learning rate schedule\n", + "optimizer = nlp.optimization.create_optimizer(\n", + " 2e-5, num_train_steps=num_train_steps, num_warmup_steps=warmup_steps)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "pXRGxiRNEHS2" + }, + "source": [ + "This returns an `AdamWeightDecay` optimizer with the learning rate schedule set:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "eQNA16bhDpky" + }, + "outputs": [], + "source": [ + "type(optimizer)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "xqu_K71fJQB8" + }, + "source": [ + "To see an example of how to customize the optimizer and it's schedule, see the [Optimizer schedule appendix](#optiizer_schedule)." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "78FEUOOEkoP0" + }, + "source": [ + "### Train the model" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "OTNcA0O0nSq9" + }, + "source": [ + "The metric is accuracy and we use sparse categorical cross-entropy as loss." + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "nzi8hjeTQTRs" + }, + "outputs": [], + "source": [ + "metrics = [tf.keras.metrics.SparseCategoricalAccuracy('accuracy', dtype=tf.float32)]\n", + "loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)\n", + "\n", + "bert_classifier.compile(\n", + " optimizer=optimizer,\n", + " loss=loss,\n", + " metrics=metrics)\n", + "\n", + "bert_classifier.fit(\n", + " glue_train, glue_train_labels,\n", + " validation_data=(glue_validation, glue_validation_labels),\n", + " batch_size=32,\n", + " epochs=epochs)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "IFtKFWbNKb0u" + }, + "source": [ + "Now run the fine-tuned model on a custom example to see that it works.\n", + "\n", + "Start by encoding some sentence pairs:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "9ZoUgDUNJPz3" + }, + "outputs": [], + "source": [ + "my_examples = bert_encode(\n", + " glue_dict = {\n", + " 'sentence1':[\n", + " 'The rain in Spain falls mainly on the plain.',\n", + " 'Look I fine tuned BERT.'],\n", + " 'sentence2':[\n", + " 'It mostly rains on the flat lands of Spain.',\n", + " 'Is it working? 
This does not match.']\n", + " },\n", + " tokenizer=tokenizer)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "7ynJibkBRTJF" + }, + "source": [ + "The model should report class `1` \"match\" for the first example and class `0` \"no-match\" for the second:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "umo0ttrgRYIM" + }, + "outputs": [], + "source": [ + "result = bert_classifier(my_examples, training=False)\n", + "\n", + "result = tf.argmax(result).numpy()\n", + "result" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "utGl0M3aZCE4" + }, + "outputs": [], + "source": [ + "np.array(info.features['label'].names)[result]" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "fVo_AnT0l26j" + }, + "source": [ + "### Save the model\n", + "\n", + "Often the goal of training a model is to _use_ it for something, so export the model and then restore it to be sure that it works." + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "Nl5x6nElZqkP" + }, + "outputs": [], + "source": [ + "export_dir='./saved_model'\n", + "tf.saved_model.save(bert_classifier, export_dir=export_dir)" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "y_ACvKPsVUXC" + }, + "outputs": [], + "source": [ + "reloaded = tf.saved_model.load(export_dir)\n", + "reloaded_result = reloaded([my_examples['input_word_ids'],\n", + " my_examples['input_mask'],\n", + " my_examples['input_type_ids']], training=False)\n", + "\n", + "original_result = bert_classifier(my_examples, training=False)\n", + "\n", + "# The results are (nearly) identical:\n", + "print(original_result.numpy())\n", + "print()\n", + "print(reloaded_result.numpy())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "eQceYqRFT_Eg" + }, + "source": [ + "## Appendix" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "SaC1RlFawUpc" + }, + "source": [ + "\u003ca id=re_encoding_tools\u003e\u003c/a\u003e\n", + "### Re-encoding a large dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "CwUdjFBkzUgh" + }, + "source": [ + "This tutorial you re-encoded the dataset in memory, for clarity.\n", + "\n", + "This was only possible because `glue/mrpc` is a very small dataset. To deal with larger datasets `tf_models` library includes some tools for processing and re-encoding a dataset for efficient training." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "2UTQrkyOT5wD" + }, + "source": [ + "The first step is to describe which features of the dataset should be transformed:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "XQeDFOzYR9Z9" + }, + "outputs": [], + "source": [ + "processor = nlp.data.classifier_data_lib.TfdsProcessor(\n", + " tfds_params=\"dataset=glue/mrpc,text_key=sentence1,text_b_key=sentence2\",\n", + " process_text_fn=bert.tokenization.convert_to_unicode)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "XrFQbfErUWxa" + }, + "source": [ + "Then apply the transformation to generate new TFRecord files." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "ymw7GOHpSHKU" + }, + "outputs": [], + "source": [ + "# Set up output of training and evaluation Tensorflow dataset\n", + "train_data_output_path=\"./mrpc_train.tf_record\"\n", + "eval_data_output_path=\"./mrpc_eval.tf_record\"\n", + "\n", + "max_seq_length = 128\n", + "batch_size = 32\n", + "eval_batch_size = 32\n", + "\n", + "# Generate and save training data into a tf record file\n", + "input_meta_data = (\n", + " nlp.data.classifier_data_lib.generate_tf_record_from_data_file(\n", + " processor=processor,\n", + " data_dir=None, # It is `None` because data is from tfds, not local dir.\n", + " tokenizer=tokenizer,\n", + " train_data_output_path=train_data_output_path,\n", + " eval_data_output_path=eval_data_output_path,\n", + " max_seq_length=max_seq_length))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "uX_Sp-wTUoRm" + }, + "source": [ + "Finally create `tf.data` input pipelines from those TFRecord files:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "rkHxIK57SQ_r" + }, + "outputs": [], + "source": [ + "training_dataset = bert.run_classifier.get_dataset_fn(\n", + " train_data_output_path,\n", + " max_seq_length,\n", + " batch_size,\n", + " is_training=True)()\n", + "\n", + "evaluation_dataset = bert.run_classifier.get_dataset_fn(\n", + " eval_data_output_path,\n", + " max_seq_length,\n", + " eval_batch_size,\n", + " is_training=False)()\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "stbaVouogvzS" + }, + "source": [ + "The resulting `tf.data.Datasets` return `(features, labels)` pairs, as expected by `keras.Model.fit`:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "gwhrlQl4gxVF" + }, + "outputs": [], + "source": [ + "training_dataset.element_spec" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "dbJ76vSJj77j" + }, + "source": [ + "#### Create tf.data.Dataset for training and evaluation\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "9J95LFRohiYw" + }, + "source": [ + "If you need to modify the data loading here is some code to get you started:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "gCvaLLAxPuMc" + }, + "outputs": [], + "source": [ + "def create_classifier_dataset(file_path, seq_length, batch_size, is_training):\n", + " \"\"\"Creates input dataset from (tf)records files for train/eval.\"\"\"\n", + " dataset = tf.data.TFRecordDataset(file_path)\n", + " if is_training:\n", + " dataset = dataset.shuffle(100)\n", + " dataset = dataset.repeat()\n", + "\n", + " def decode_record(record):\n", + " name_to_features = {\n", + " 'input_ids': tf.io.FixedLenFeature([seq_length], tf.int64),\n", + " 'input_mask': tf.io.FixedLenFeature([seq_length], tf.int64),\n", + " 'segment_ids': tf.io.FixedLenFeature([seq_length], tf.int64),\n", + " 'label_ids': tf.io.FixedLenFeature([], tf.int64),\n", + " }\n", + " return tf.io.parse_single_example(record, name_to_features)\n", + "\n", + " def _select_data_from_record(record):\n", + " x = {\n", + " 'input_word_ids': record['input_ids'],\n", + " 'input_mask': record['input_mask'],\n", + " 'input_type_ids': record['segment_ids']\n", + " 
}\n", + " y = record['label_ids']\n", + " return (x, y)\n", + "\n", + " dataset = dataset.map(decode_record,\n", + " num_parallel_calls=tf.data.experimental.AUTOTUNE)\n", + " dataset = dataset.map(\n", + " _select_data_from_record,\n", + " num_parallel_calls=tf.data.experimental.AUTOTUNE)\n", + " dataset = dataset.batch(batch_size, drop_remainder=is_training)\n", + " dataset = dataset.prefetch(tf.data.experimental.AUTOTUNE)\n", + " return dataset" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "rutkBadrhzdR" + }, + "outputs": [], + "source": [ + "# Set up batch sizes\n", + "batch_size = 32\n", + "eval_batch_size = 32\n", + "\n", + "# Return Tensorflow dataset\n", + "training_dataset = create_classifier_dataset(\n", + " train_data_output_path,\n", + " input_meta_data['max_seq_length'],\n", + " batch_size,\n", + " is_training=True)\n", + "\n", + "evaluation_dataset = create_classifier_dataset(\n", + " eval_data_output_path,\n", + " input_meta_data['max_seq_length'],\n", + " eval_batch_size,\n", + " is_training=False)" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "59TVgt4Z7fuU" + }, + "outputs": [], + "source": [ + "training_dataset.element_spec" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "QbklKt-w_CiI" + }, + "source": [ + "\u003ca id=\"hub_bert\"\u003e\u003c/a\u003e\n", + "\n", + "### TFModels BERT on TFHub\n", + "\n", + "You can get [the BERT model](https://tfhub.dev/tensorflow/bert_en_uncased_L-12_H-768_A-12/2) off the shelf from [TFHub](https://tensorflow.org/hub). It would not be hard to add a classification head on top of this `hub.KerasLayer`" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "lo6479At4sP1" + }, + "outputs": [], + "source": [ + "# Note: 350MB download.\n", + "import tensorflow_hub as hub\n", + "hub_encoder = hub.KerasLayer(hub_url_bert, trainable=True)\n", + "\n", + "print(f\"The Hub encoder has {len(hub_encoder.trainable_variables)} trainable variables\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "iTzF574wivQv" + }, + "source": [ + "Test run it on a batch of data:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "XEcYrCR45Uwo" + }, + "outputs": [], + "source": [ + "result = hub_encoder(\n", + " inputs=[glue_train['input_word_ids'][:10],\n", + " glue_train['input_mask'][:10],\n", + " glue_train['input_type_ids'][:10],],\n", + " training=False,\n", + ")\n", + "\n", + "print(\"Pooled output shape:\", result[0].shape)\n", + "print(\"Sequence output shape:\", result[1].shape)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "cjojn8SmLSRI" + }, + "source": [ + "At this point it would be simple to add a classification head yourself.\n", + "\n", + "The `bert_models.classifier_model` function can also build a classifier onto the encoder from TensorFlow Hub:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "9nTDaApyLR70" + }, + "outputs": [], + "source": [ + "hub_classifier, hub_encoder = bert.bert_models.classifier_model(\n", + " # Caution: Most of `bert_config` is ignored if you pass a hub url.\n", + " bert_config=bert_config, hub_module_url=hub_url_bert, num_labels=2)" + ] + 
}, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "xMJX3wV0_v7I" + }, + "source": [ + "The one downside to loading this model from TFHub is that the structure of internal keras layers is not restored. So it's more difficult to inspect or modify the model. The `TransformerEncoder` model is now a single layer:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "pD71dnvhM2QS" + }, + "outputs": [], + "source": [ + "tf.keras.utils.plot_model(hub_classifier, show_shapes=True, dpi=64)" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "nLZD-isBzNKi" + }, + "outputs": [], + "source": [ + "try:\n", + " tf.keras.utils.plot_model(hub_encoder, show_shapes=True, dpi=64)\n", + " assert False\n", + "except Exception as e:\n", + " print(f\"{type(e).__name__}: {e}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "ZxSqH0dNAgXV" + }, + "source": [ + "\u003ca id=\"model_builder_functions\"\u003e\u003c/a\u003e\n", + "\n", + "### Low level model building\n", + "\n", + "If you need more control over the construction of the model, it's worth noting that the `classifier_model` function used earlier is really just a thin wrapper over the `nlp.modeling.networks.TransformerEncoder` and `nlp.modeling.models.BertClassifier` classes. Just remember that if you start modifying the architecture it may not be correct or possible to reload the pre-trained checkpoint, so you'll need to retrain from scratch." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "0cgABEwDj06P" + }, + "source": [ + "Build the encoder:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "5r_yqhBFSVEM" + }, + "outputs": [], + "source": [ + "transformer_config = config_dict.copy()\n", + "\n", + "# You need to rename a few fields to make this work:\n", + "transformer_config['attention_dropout_rate'] = transformer_config.pop('attention_probs_dropout_prob')\n", + "transformer_config['activation'] = tf_utils.get_activation(transformer_config.pop('hidden_act'))\n", + "transformer_config['dropout_rate'] = transformer_config.pop('hidden_dropout_prob')\n", + "transformer_config['initializer'] = tf.keras.initializers.TruncatedNormal(\n", + " stddev=transformer_config.pop('initializer_range'))\n", + "transformer_config['max_sequence_length'] = transformer_config.pop('max_position_embeddings')\n", + "transformer_config['num_layers'] = transformer_config.pop('num_hidden_layers')\n", + "\n", + "transformer_config" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "rIO8MI7LLijh" + }, + "outputs": [], + "source": [ + "manual_encoder = nlp.modeling.networks.TransformerEncoder(**transformer_config)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "4a4tFSg9krRi" + }, + "source": [ + "Restore the weights:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "X6N9NEqfXJCx" + }, + "outputs": [], + "source": [ + "checkpoint = tf.train.Checkpoint(model=manual_encoder)\n", + "checkpoint.restore(\n", + " os.path.join(gs_folder_bert, 'bert_model.ckpt')).assert_consumed()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": 
"1BPiPO4ykuwM" + }, + "source": [ + "Test run it:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "hlVdgJKmj389" + }, + "outputs": [], + "source": [ + "result = manual_encoder(my_examples, training=True)\n", + "\n", + "print(\"Sequence output shape:\", result[0].shape)\n", + "print(\"Pooled output shape:\", result[1].shape)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "nJMXvVgJkyBv" + }, + "source": [ + "Wrap it in a classifier:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "tQX57GJ6wkAb" + }, + "outputs": [], + "source": [ + "manual_classifier = nlp.modeling.models.BertClassifier(\n", + " bert_encoder,\n", + " num_classes=2,\n", + " dropout_rate=transformer_config['dropout_rate'],\n", + " initializer=tf.keras.initializers.TruncatedNormal(\n", + " stddev=bert_config.initializer_range))" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "kB-nBWhQk0dS" + }, + "outputs": [], + "source": [ + "manual_classifier(my_examples, training=True).numpy()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "E6AJlOSyIO1L" + }, + "source": [ + "\u003ca id=\"optiizer_schedule\"\u003e\u003c/a\u003e\n", + "\n", + "### Optimizers and schedules\n", + "\n", + "The optimizer used to train the model was created using the `nlp.optimization.create_optimizer` function:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "28Dv3BPRlFTD" + }, + "outputs": [], + "source": [ + "optimizer = nlp.optimization.create_optimizer(\n", + " 2e-5, num_train_steps=num_train_steps, num_warmup_steps=warmup_steps)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "LRjcHr0UlT8c" + }, + "source": [ + "That high level wrapper sets up the learning rate schedules and the optimizer.\n", + "\n", + "The base learning rate schedule used here is a linear decay to zero over the training run:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "MHY8K6kDngQn" + }, + "outputs": [], + "source": [ + "epochs = 3\n", + "batch_size = 32\n", + "eval_batch_size = 32\n", + "\n", + "train_data_size = len(glue_train_labels)\n", + "steps_per_epoch = int(train_data_size / batch_size)\n", + "num_train_steps = steps_per_epoch * epochs" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "wKIcSprulu3P" + }, + "outputs": [], + "source": [ + "decay_schedule = tf.keras.optimizers.schedules.PolynomialDecay(\n", + " initial_learning_rate=2e-5,\n", + " decay_steps=num_train_steps,\n", + " end_learning_rate=0)\n", + "\n", + "plt.plot([decay_schedule(n) for n in range(num_train_steps)])" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "IMTC_gfAl_PZ" + }, + "source": [ + "This, in turn is wrapped in a `WarmUp` schedule that linearly increases the learning rate to the target value over the first 10% of training:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "YRt3VTmBmCBY" + }, + "outputs": [], + "source": [ + "warmup_steps = num_train_steps * 0.1\n", + "\n", + "warmup_schedule = 
nlp.optimization.WarmUp(\n", + " initial_learning_rate=2e-5,\n", + " decay_schedule_fn=decay_schedule,\n", + " warmup_steps=warmup_steps)\n", + "\n", + "# The warmup overshoots, because it warms up to the `initial_learning_rate`\n", + "# following the original implementation. You can set\n", + "# `initial_learning_rate=decay_schedule(warmup_steps)` if you don't like the\n", + "# overshoot.\n", + "plt.plot([warmup_schedule(n) for n in range(num_train_steps)])" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "l8D9Lv3Bn740" + }, + "source": [ + "Then create the `nlp.optimization.AdamWeightDecay` using that schedule, configured for the BERT model:" + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "2Hf2rpRXk89N" + }, + "outputs": [], + "source": [ + "optimizer = nlp.optimization.AdamWeightDecay(\n", + " learning_rate=warmup_schedule,\n", + " weight_decay_rate=0.01,\n", + " epsilon=1e-6,\n", + " exclude_from_weight_decay=['LayerNorm', 'layer_norm', 'bias'])" ] } ], @@ -539,8 +1815,10 @@ "accelerator": "GPU", "colab": { "collapsed_sections": [], - "name": "How-to Guide: Using a PIP package for fine-tuning a BERT model", - "provenance": [] + "name": "fine_tuning_bert.ipynb", + "private_outputs": true, + "provenance": [], + "toc_visible": true }, "kernelspec": { "display_name": "Python 3", -- GitLab From e4f044563713bda0e40958b2dfe18bb9daa198b2 Mon Sep 17 00:00:00 2001 From: "A. Unique TensorFlower" Date: Tue, 16 Jun 2020 18:19:31 -0700 Subject: [PATCH 11/79] Internal change. PiperOrigin-RevId: 316797039 --- official/benchmark/unet3d_benchmark.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/official/benchmark/unet3d_benchmark.py b/official/benchmark/unet3d_benchmark.py index 750306b22..2614b2925 100644 --- a/official/benchmark/unet3d_benchmark.py +++ b/official/benchmark/unet3d_benchmark.py @@ -32,9 +32,9 @@ from official.vision.segmentation import unet_model as unet_model_lib UNET3D_MIN_ACCURACY = 0.90 UNET3D_MAX_ACCURACY = 0.98 -UNET_TRAINING_FILES = 'unet_training_data_files' -UNET_EVAL_FILES = 'unet_eval_data_files' -UNET_MODEL_CONFIG_FILE = 'unet_model_config' +UNET_TRAINING_FILES = 'gs://mlcompass-data/unet3d/train_data/*' +UNET_EVAL_FILES = 'gs://mlcompass-data/unet3d/eval_data/*' +UNET_MODEL_CONFIG_FILE = 'gs://mlcompass-data/unet3d/config/unet_config.yaml' FLAGS = flags.FLAGS -- GitLab From 6da061c0d537bb7c125a3978a8e5349e02825880 Mon Sep 17 00:00:00 2001 From: Abdullah Rashwan Date: Tue, 16 Jun 2020 18:23:44 -0700 Subject: [PATCH 12/79] Internal change PiperOrigin-RevId: 316797555 --- .../modeling/hyperparams/config_definitions.py | 16 ++++++++++++++++ official/nlp/transformer/misc.py | 2 +- official/utils/misc/keras_utils.py | 7 ++++--- 3 files changed, 21 insertions(+), 4 deletions(-) diff --git a/official/modeling/hyperparams/config_definitions.py b/official/modeling/hyperparams/config_definitions.py index ae7bfce2c..2fbcdea44 100644 --- a/official/modeling/hyperparams/config_definitions.py +++ b/official/modeling/hyperparams/config_definitions.py @@ -162,6 +162,21 @@ class CallbacksConfig(base_config.Config): @dataclasses.dataclass class TrainerConfig(base_config.Config): + """Configuration for trainer. + + Attributes: + optimizer_config: optimizer config, it includes optimizer, learning rate, + and warmup schedule configs. + train_tf_while_loop: whether or not to use tf while loop. 
+ train_tf_function: whether or not to use tf_function for training loop. + eval_tf_function: whether or not to use tf_function for eval. + steps_per_loop: number of steps per loop. + summary_interval: number of steps between each summary. + checkpoint_interval: number of steps between checkpoints. + max_to_keep: max checkpoints to keep. + continuous_eval_timeout: maximum number of seconds to wait between + checkpoints. If set to None, continuous eval will wait indefinitely. + """ optimizer_config: OptimizationConfig = OptimizationConfig() train_tf_while_loop: bool = True train_tf_function: bool = True @@ -170,6 +185,7 @@ class TrainerConfig(base_config.Config): summary_interval: int = 1000 checkpoint_interval: int = 1000 max_to_keep: int = 5 + continuous_eval_timeout: Optional[int] = None @dataclasses.dataclass diff --git a/official/nlp/transformer/misc.py b/official/nlp/transformer/misc.py index 45b47741b..e2b351ae6 100644 --- a/official/nlp/transformer/misc.py +++ b/official/nlp/transformer/misc.py @@ -218,7 +218,7 @@ def get_callbacks(): time_callback = keras_utils.TimeHistory( FLAGS.batch_size, FLAGS.log_steps, - FLAGS.model_dir if FLAGS.enable_tensorboard else None) + logdir=FLAGS.model_dir if FLAGS.enable_tensorboard else None) callbacks.append(time_callback) if FLAGS.enable_tensorboard: diff --git a/official/utils/misc/keras_utils.py b/official/utils/misc/keras_utils.py index 4a9086b9a..2cca51f1d 100644 --- a/official/utils/misc/keras_utils.py +++ b/official/utils/misc/keras_utils.py @@ -41,12 +41,13 @@ class BatchTimestamp(object): class TimeHistory(tf.keras.callbacks.Callback): """Callback for Keras models.""" - def __init__(self, batch_size, log_steps, logdir=None): + def __init__(self, batch_size, log_steps, initial_step=0, logdir=None): """Callback for logging performance. Args: batch_size: Total batch size. log_steps: Interval of steps between logging of batch level stats. + initial_step: Optional, initial step. logdir: Optional directory to write TensorBoard summaries. """ # TODO(wcromar): remove this parameter and rely on `logs` parameter of @@ -54,8 +55,8 @@ class TimeHistory(tf.keras.callbacks.Callback): self.batch_size = batch_size super(TimeHistory, self).__init__() self.log_steps = log_steps - self.last_log_step = 0 - self.steps_before_epoch = 0 + self.last_log_step = initial_step + self.steps_before_epoch = initial_step self.steps_in_epoch = 0 self.start_time = None -- GitLab From d0ef3913ccb119a2a4bf7acb9fd4477d0a86d245 Mon Sep 17 00:00:00 2001 From: "A. Unique TensorFlower" Date: Tue, 16 Jun 2020 18:58:46 -0700 Subject: [PATCH 13/79] Update model_training_utils for BERT. PiperOrigin-RevId: 316801831 --- official/nlp/bert/model_training_utils.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/official/nlp/bert/model_training_utils.py b/official/nlp/bert/model_training_utils.py index 7c72f6d76..f0fe67615 100644 --- a/official/nlp/bert/model_training_utils.py +++ b/official/nlp/bert/model_training_utils.py @@ -160,9 +160,10 @@ def run_customized_training_loop( init_checkpoint: Optional checkpoint to load to `sub_model` returned by `model_fn`. custom_callbacks: A list of Keras Callbacks objects to run during - training. More specifically, `on_batch_begin()`, `on_batch_end()`, - `on_epoch_begin()`, `on_epoch_end()` methods are invoked during - training. Note that some metrics may be missing from `logs`. + training. 
More specifically, `on_train_begin(), on_train_end(), + on_batch_begin()`, `on_batch_end()`, `on_epoch_begin()`, + `on_epoch_end()` methods are invoked during training. + Note that some metrics may be missing from `logs`. run_eagerly: Whether to run model training in pure eager execution. This should be disable for TPUStrategy. sub_model_export_name: If not None, will export `sub_model` returned by @@ -246,8 +247,6 @@ def run_customized_training_loop( raise ValueError( 'if `metric_fn` is specified, metric_fn must be a callable.') - callback_list = tf.keras.callbacks.CallbackList(custom_callbacks) - total_training_steps = steps_per_epoch * epochs train_iterator = _get_input_iterator(train_input_fn, strategy) eval_loss_metric = tf.keras.metrics.Mean('training_loss', dtype=tf.float32) @@ -263,6 +262,9 @@ def run_customized_training_loop( raise ValueError('sub_model_export_name is specified as %s, but ' 'sub_model is None.' % sub_model_export_name) + callback_list = tf.keras.callbacks.CallbackList( + callbacks=custom_callbacks, model=model) + optimizer = model.optimizer if init_checkpoint: @@ -451,7 +453,8 @@ def run_customized_training_loop( checkpoint_name = 'ctl_step_{step}.ckpt' logs = {} - while current_step < total_training_steps: + callback_list.on_train_begin() + while current_step < total_training_steps and not model.stop_training: if current_step % steps_per_epoch == 0: callback_list.on_epoch_begin( int(current_step / steps_per_epoch) + 1) @@ -564,4 +567,6 @@ def run_customized_training_loop( if not _should_export_summary(strategy): tf.io.gfile.rmtree(summary_dir) + callback_list.on_train_end() + return model -- GitLab From 420a7253e034a12ae2208e6ec94d3e4936177a53 Mon Sep 17 00:00:00 2001 From: pkulzc Date: Wed, 17 Jun 2020 00:59:17 -0700 Subject: [PATCH 14/79] Refactor tests for Object Detection API. 
(#8688) Internal changes -- PiperOrigin-RevId: 316837667 --- .../builders/box_predictor_builder_test.py | 6 + .../builders/calibration_builder_test.py | 46 +- .../builders/dataset_builder.py | 40 +- .../builders/dataset_builder_test.py | 25 +- .../builders/decoder_builder.py | 4 +- .../builders/decoder_builder_test.py | 156 +- ....py => graph_rewriter_builder_tf1_test.py} | 13 +- .../builders/hyperparams_builder_test.py | 631 ++--- .../builders/image_resizer_builder_test.py | 24 +- .../builders/input_reader_builder.py | 13 +- ...st.py => input_reader_builder_tf1_test.py} | 3 + .../builders/matcher_builder.py | 7 +- .../builders/matcher_builder_test.py | 20 +- .../builders/model_builder.py | 25 +- .../builders/model_builder_tf1_test.py | 13 +- .../builders/model_builder_tf2_test.py | 261 ++ .../builders/optimizer_builder.py | 7 +- .../builders/optimizer_builder_tf1_test.py | 9 +- .../builders/optimizer_builder_tf2_test.py | 104 + .../builders/post_processing_builder_test.py | 52 +- .../context_rcnn_tutorial.ipynb | 1500 +++++++++++ .../object_detection_tutorial.ipynb | 4 +- .../core/batch_multiclass_nms_test.py | 877 +++--- research/object_detection/core/batcher.py | 4 - .../object_detection/core/batcher_tf1_test.py | 3 + ...st.py => freezable_batch_norm_tf2_test.py} | 4 +- research/object_detection/core/losses.py | 92 + research/object_detection/core/prefetcher.py | 4 - .../core/prefetcher_tf1_test.py | 8 +- .../core/preprocessor_test.py | 5 +- .../object_detection/core/target_assigner.py | 946 ++++++- .../core/target_assigner_test.py | 690 ++++- .../dataset_tools/context_rcnn/__init__.py | 0 .../context_rcnn/add_context_to_examples.py | 845 ++++++ .../add_context_to_examples_tf1_test.py | 384 +++ .../create_cococameratraps_tfexample_main.py | 324 +++ ...eate_cococameratraps_tfexample_tf1_test.py | 201 ++ .../context_rcnn/generate_detection_data.py | 262 ++ .../generate_detection_data_tf1_test.py | 267 ++ .../context_rcnn/generate_embedding_data.py | 378 +++ .../generate_embedding_data_tf1_test.py | 337 +++ .../dataset_tools/seq_example_util_test.py | 17 +- research/object_detection/eval_util.py | 2 + research/object_detection/eval_util_test.py | 124 +- .../export_inference_graph.py | 46 +- .../export_tflite_ssd_graph_lib.py | 5 +- ...> export_tflite_ssd_graph_lib_tf1_test.py} | 3 + research/object_detection/exporter.py | 136 +- .../object_detection/exporter_lib_tf2_test.py | 237 ++ research/object_detection/exporter_lib_v2.py | 182 ++ research/object_detection/exporter_main_v2.py | 126 + ...{exporter_test.py => exporter_tf1_test.py} | 5 +- .../object_detection/g3doc/context_rcnn.md | 173 ++ .../g3doc/detection_model_zoo.md | 231 +- ...est.py => detection_inference_tf1_test.py} | 4 +- research/object_detection/inputs.py | 25 +- research/object_detection/inputs_test.py | 1152 ++++---- .../{trainer_test.py => trainer_tf1_test.py} | 4 +- ..._test.py => bipartite_matcher_tf1_test.py} | 8 +- .../center_net_meta_arch.py | 2348 +++++++++++++++++ .../center_net_meta_arch_tf2_test.py | 1683 ++++++++++++ .../meta_architectures/context_rcnn_lib.py | 224 ++ .../context_rcnn_lib_tf1_test.py | 126 + .../context_rcnn_meta_arch.py | 340 +++ .../context_rcnn_meta_arch_tf1_test.py | 7 +- ....py => calibration_evaluation_tf1_test.py} | 3 + ...est.py => calibration_metrics_tf1_test.py} | 3 + .../metrics/coco_evaluation.py | 533 ++++ .../metrics/coco_evaluation_test.py | 221 ++ .../object_detection/metrics/coco_tools.py | 5 +- .../metrics/offline_eval_map_corloc.py | 4 +- ...odel_lib_test.py => 
model_lib_tf1_test.py} | 13 +- ...l_lib_v2_test.py => model_lib_tf2_test.py} | 6 +- research/object_detection/model_lib_v2.py | 18 +- research/object_detection/model_main_tf2.py | 112 + .../center_net_hourglass_feature_extractor.py | 75 + ...et_hourglass_feature_extractor_tf2_test.py | 44 + .../center_net_resnet_feature_extractor.py | 149 ++ ...r_net_resnet_feature_extractor_tf2_test.py | 54 + ...ter_net_resnet_v1_fpn_feature_extractor.py | 176 ++ ...esnet_v1_fpn_feature_extractor_tf2_test.py | 49 + ...obilenet_v1_feature_extractor_tf1_test.py} | 3 + ...n_resnet_v2_feature_extractor_tf1_test.py} | 4 +- ...et_v2_keras_feature_extractor_tf2_test.py} | 43 +- ...nception_v2_feature_extractor_tf1_test.py} | 4 +- ...obilenet_v1_feature_extractor_tf1_test.py} | 4 +- .../faster_rcnn_nas_feature_extractor.py | 10 +- ...er_rcnn_nas_feature_extractor_tf1_test.py} | 4 +- .../faster_rcnn_pnas_feature_extractor.py | 6 +- ...r_rcnn_pnas_feature_extractor_tf1_test.py} | 4 +- ...ter_rcnn_resnet_keras_feature_extractor.py | 271 ++ ...resnet_keras_feature_extractor_tf2_test.py | 80 + ...n_resnet_v1_feature_extractor_tf1_test.py} | 4 +- .../models/feature_map_generators_test.py | 625 ++--- .../keras_models/convert_keras_models.py | 85 + ..._test.py => hourglass_network_tf2_test.py} | 5 +- ...est.py => inception_resnet_v2_tf2_test.py} | 7 +- ...et_v1_test.py => mobilenet_v1_tf2_test.py} | 61 +- ...et_v2_test.py => mobilenet_v2_tf2_test.py} | 66 +- ...esnet_v1_test.py => resnet_v1_tf2_test.py} | 7 +- .../models/ssd_feature_extractor_test.py | 100 +- ...nception_v2_feature_extractor_tf1_test.py} | 3 + ...nception_v3_feature_extractor_tf1_test.py} | 3 + .../models/ssd_mobiledet_feature_extractor.py | 94 + ...d_mobiledet_feature_extractor_tf1_test.py} | 23 +- ...net_edgetpu_feature_extractor_tf1_test.py} | 4 +- ...obilenet_v1_feature_extractor_tf1_test.py} | 101 +- ...mobilenet_v1_feature_extractor_tf2_test.py | 248 ++ ...enet_v1_fpn_feature_extractor_tf1_test.py} | 100 +- ...lenet_v1_fpn_feature_extractor_tf2_test.py | 179 ++ ...obilenet_v1_fpn_keras_feature_extractor.py | 17 +- ...sd_mobilenet_v1_keras_feature_extractor.py | 17 +- ...enet_v1_ppn_feature_extractor_tf1_test.py} | 3 + ...obilenet_v2_feature_extractor_tf1_test.py} | 104 +- ...mobilenet_v2_feature_extractor_tf2_test.py | 192 ++ ...enet_v2_fpn_feature_extractor_tf1_test.py} | 148 +- ...lenet_v2_fpn_feature_extractor_tf2_test.py | 269 ++ ...obilenet_v2_fpn_keras_feature_extractor.py | 17 +- ...sd_mobilenet_v2_keras_feature_extractor.py | 17 +- ..._v2_mnasfpn_feature_extractor_tf1_test.py} | 3 + ...obilenet_v3_feature_extractor_tf1_test.py} | 9 +- .../models/ssd_pnasnet_feature_extractor.py | 5 +- ...ssd_pnasnet_feature_extractor_tf1_test.py} | 3 + ...sd_resnet_v1_fpn_feature_extractor_test.py | 126 - ...esnet_v1_fpn_feature_extractor_testbase.py | 86 +- ...esnet_v1_fpn_feature_extractor_tf1_test.py | 85 + ...esnet_v1_fpn_feature_extractor_tf2_test.py | 103 + ...snet_v1_ppn_feature_extractor_tf1_test.py} | 5 + ...> convolutional_box_predictor_tf1_test.py} | 5 +- ...olutional_keras_box_predictor_tf2_test.py} | 678 ++--- ...{box_head_test.py => box_head_tf1_test.py} | 5 + ...ss_head_test.py => class_head_tf1_test.py} | 5 + ...ead_test.py => keras_box_head_tf2_test.py} | 107 +- ...d_test.py => keras_class_head_tf2_test.py} | 112 +- ...ad_test.py => keras_mask_head_tf2_test.py} | 99 +- ...head_test.py => keypoint_head_tf1_test.py} | 3 + ...ask_head_test.py => mask_head_tf1_test.py} | 5 + ...py => mask_rcnn_box_predictor_tf1_test.py} | 3 + 
...mask_rcnn_keras_box_predictor_tf2_test.py} | 70 +- ...test.py => rfcn_box_predictor_tf1_test.py} | 3 + ...y => rfcn_keras_box_predictor_tf2_test.py} | 24 +- .../object_detection/protos/center_net.proto | 203 ++ .../object_detection/protos/faster_rcnn.proto | 2 +- research/object_detection/protos/model.proto | 2 + ...mobiledet_gpu_320x320_coco_sync_4x4.config | 204 ++ .../test_images/snapshot_serengeti/README.md | 17 + .../S1_E03_R3_PICT0038.jpeg | Bin 0 -> 630625 bytes .../S1_E03_R3_PICT0039.jpeg | Bin 0 -> 631194 bytes .../S1_E03_R3_PICT0040.jpeg | Bin 0 -> 629981 bytes .../S1_E03_R3_PICT0041.jpeg | Bin 0 -> 641490 bytes .../context_rcnn_demo_metadata.json | 1 + ...=> export_saved_model_tpu_lib_tf1_test.py} | 3 + .../utils/config_util_test.py | 15 +- ...el_util_test.py => model_util_tf2_test.py} | 3 + .../utils/object_detection_evaluation_test.py | 5 + research/object_detection/utils/ops.py | 2 +- research/object_detection/utils/ops_test.py | 12 +- .../utils/target_assigner_utils.py | 122 +- .../utils/target_assigner_utils_test.py | 66 +- research/object_detection/utils/test_utils.py | 16 + .../utils/variables_helper.py | 10 - ...r_test.py => variables_helper_tf1_test.py} | 7 +- 162 files changed, 19143 insertions(+), 3290 deletions(-) rename research/object_detection/builders/{graph_rewriter_builder_test.py => graph_rewriter_builder_tf1_test.py} (91%) rename research/object_detection/builders/{input_reader_builder_test.py => input_reader_builder_tf1_test.py} (98%) create mode 100644 research/object_detection/builders/model_builder_tf2_test.py create mode 100644 research/object_detection/builders/optimizer_builder_tf2_test.py create mode 100644 research/object_detection/colab_tutorials/context_rcnn_tutorial.ipynb rename research/object_detection/{ => colab_tutorials}/object_detection_tutorial.ipynb (98%) rename research/object_detection/core/{freezable_batch_norm_test.py => freezable_batch_norm_tf2_test.py} (98%) create mode 100644 research/object_detection/dataset_tools/context_rcnn/__init__.py create mode 100644 research/object_detection/dataset_tools/context_rcnn/add_context_to_examples.py create mode 100644 research/object_detection/dataset_tools/context_rcnn/add_context_to_examples_tf1_test.py create mode 100644 research/object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_main.py create mode 100644 research/object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_tf1_test.py create mode 100644 research/object_detection/dataset_tools/context_rcnn/generate_detection_data.py create mode 100644 research/object_detection/dataset_tools/context_rcnn/generate_detection_data_tf1_test.py create mode 100644 research/object_detection/dataset_tools/context_rcnn/generate_embedding_data.py create mode 100644 research/object_detection/dataset_tools/context_rcnn/generate_embedding_data_tf1_test.py rename research/object_detection/{export_tflite_ssd_graph_lib_test.py => export_tflite_ssd_graph_lib_tf1_test.py} (99%) create mode 100644 research/object_detection/exporter_lib_tf2_test.py create mode 100644 research/object_detection/exporter_lib_v2.py create mode 100644 research/object_detection/exporter_main_v2.py rename research/object_detection/{exporter_test.py => exporter_tf1_test.py} (99%) create mode 100644 research/object_detection/g3doc/context_rcnn.md rename research/object_detection/inference/{detection_inference_test.py => detection_inference_tf1_test.py} (98%) rename research/object_detection/legacy/{trainer_test.py => trainer_tf1_test.py} (98%) 
rename research/object_detection/matchers/{bipartite_matcher_test.py => bipartite_matcher_tf1_test.py} (94%) create mode 100644 research/object_detection/meta_architectures/center_net_meta_arch.py create mode 100644 research/object_detection/meta_architectures/center_net_meta_arch_tf2_test.py create mode 100644 research/object_detection/meta_architectures/context_rcnn_lib.py create mode 100644 research/object_detection/meta_architectures/context_rcnn_lib_tf1_test.py create mode 100644 research/object_detection/meta_architectures/context_rcnn_meta_arch.py rename research/object_detection/metrics/{calibration_evaluation_test.py => calibration_evaluation_tf1_test.py} (98%) rename research/object_detection/metrics/{calibration_metrics_test.py => calibration_metrics_tf1_test.py} (97%) rename research/object_detection/{model_lib_test.py => model_lib_tf1_test.py} (98%) rename research/object_detection/{model_lib_v2_test.py => model_lib_tf2_test.py} (96%) create mode 100644 research/object_detection/model_main_tf2.py create mode 100644 research/object_detection/models/center_net_hourglass_feature_extractor.py create mode 100644 research/object_detection/models/center_net_hourglass_feature_extractor_tf2_test.py create mode 100644 research/object_detection/models/center_net_resnet_feature_extractor.py create mode 100644 research/object_detection/models/center_net_resnet_feature_extractor_tf2_test.py create mode 100644 research/object_detection/models/center_net_resnet_v1_fpn_feature_extractor.py create mode 100644 research/object_detection/models/center_net_resnet_v1_fpn_feature_extractor_tf2_test.py rename research/object_detection/models/{embedded_ssd_mobilenet_v1_feature_extractor_test.py => embedded_ssd_mobilenet_v1_feature_extractor_tf1_test.py} (97%) rename research/object_detection/models/{faster_rcnn_inception_resnet_v2_feature_extractor_test.py => faster_rcnn_inception_resnet_v2_feature_extractor_tf1_test.py} (97%) rename research/object_detection/models/{faster_rcnn_inception_resnet_v2_keras_feature_extractor_test.py => faster_rcnn_inception_resnet_v2_keras_feature_extractor_tf2_test.py} (67%) rename research/object_detection/models/{faster_rcnn_inception_v2_feature_extractor_test.py => faster_rcnn_inception_v2_feature_extractor_tf1_test.py} (97%) rename research/object_detection/models/{faster_rcnn_mobilenet_v1_feature_extractor_test.py => faster_rcnn_mobilenet_v1_feature_extractor_tf1_test.py} (97%) rename research/object_detection/models/{faster_rcnn_nas_feature_extractor_test.py => faster_rcnn_nas_feature_extractor_tf1_test.py} (97%) rename research/object_detection/models/{faster_rcnn_pnas_feature_extractor_test.py => faster_rcnn_pnas_feature_extractor_tf1_test.py} (97%) create mode 100644 research/object_detection/models/faster_rcnn_resnet_keras_feature_extractor.py create mode 100644 research/object_detection/models/faster_rcnn_resnet_keras_feature_extractor_tf2_test.py rename research/object_detection/models/{faster_rcnn_resnet_v1_feature_extractor_test.py => faster_rcnn_resnet_v1_feature_extractor_tf1_test.py} (98%) create mode 100644 research/object_detection/models/keras_models/convert_keras_models.py rename research/object_detection/models/keras_models/{hourglass_network_test.py => hourglass_network_tf2_test.py} (96%) rename research/object_detection/models/keras_models/{inception_resnet_v2_test.py => inception_resnet_v2_tf2_test.py} (97%) rename research/object_detection/models/keras_models/{mobilenet_v1_test.py => mobilenet_v1_tf2_test.py} (85%) rename 
research/object_detection/models/keras_models/{mobilenet_v2_test.py => mobilenet_v2_tf2_test.py} (84%) rename research/object_detection/models/keras_models/{resnet_v1_test.py => resnet_v1_tf2_test.py} (97%) rename research/object_detection/models/{ssd_inception_v2_feature_extractor_test.py => ssd_inception_v2_feature_extractor_tf1_test.py} (98%) rename research/object_detection/models/{ssd_inception_v3_feature_extractor_test.py => ssd_inception_v3_feature_extractor_tf1_test.py} (98%) rename research/object_detection/models/{ssd_mobiledet_feature_extractor_test.py => ssd_mobiledet_feature_extractor_tf1_test.py} (86%) rename research/object_detection/models/{ssd_mobilenet_edgetpu_feature_extractor_test.py => ssd_mobilenet_edgetpu_feature_extractor_tf1_test.py} (94%) rename research/object_detection/models/{ssd_mobilenet_v1_feature_extractor_test.py => ssd_mobilenet_v1_feature_extractor_tf1_test.py} (77%) create mode 100644 research/object_detection/models/ssd_mobilenet_v1_feature_extractor_tf2_test.py rename research/object_detection/models/{ssd_mobilenet_v1_fpn_feature_extractor_test.py => ssd_mobilenet_v1_fpn_feature_extractor_tf1_test.py} (76%) create mode 100644 research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_tf2_test.py rename research/object_detection/models/{ssd_mobilenet_v1_ppn_feature_extractor_test.py => ssd_mobilenet_v1_ppn_feature_extractor_tf1_test.py} (98%) rename research/object_detection/models/{ssd_mobilenet_v2_feature_extractor_test.py => ssd_mobilenet_v2_feature_extractor_tf1_test.py} (70%) create mode 100644 research/object_detection/models/ssd_mobilenet_v2_feature_extractor_tf2_test.py rename research/object_detection/models/{ssd_mobilenet_v2_fpn_feature_extractor_test.py => ssd_mobilenet_v2_fpn_feature_extractor_tf1_test.py} (70%) create mode 100644 research/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor_tf2_test.py rename research/object_detection/models/{ssd_mobilenet_v2_mnasfpn_feature_extractor_test.py => ssd_mobilenet_v2_mnasfpn_feature_extractor_tf1_test.py} (96%) rename research/object_detection/models/{ssd_mobilenet_v3_feature_extractor_test.py => ssd_mobilenet_v3_feature_extractor_tf1_test.py} (95%) rename research/object_detection/models/{ssd_pnasnet_feature_extractor_test.py => ssd_pnasnet_feature_extractor_tf1_test.py} (97%) delete mode 100644 research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_test.py create mode 100644 research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_tf1_test.py create mode 100644 research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_tf2_test.py rename research/object_detection/models/{ssd_resnet_v1_ppn_feature_extractor_test.py => ssd_resnet_v1_ppn_feature_extractor_tf1_test.py} (92%) rename research/object_detection/predictors/{convolutional_box_predictor_test.py => convolutional_box_predictor_tf1_test.py} (99%) rename research/object_detection/predictors/{convolutional_keras_box_predictor_test.py => convolutional_keras_box_predictor_tf2_test.py} (64%) rename research/object_detection/predictors/heads/{box_head_test.py => box_head_tf1_test.py} (94%) rename research/object_detection/predictors/heads/{class_head_test.py => class_head_tf1_test.py} (96%) rename research/object_detection/predictors/heads/{keras_box_head_test.py => keras_box_head_tf2_test.py} (67%) rename research/object_detection/predictors/heads/{keras_class_head_test.py => keras_class_head_tf2_test.py} (66%) rename research/object_detection/predictors/heads/{keras_mask_head_test.py 
=> keras_mask_head_tf2_test.py} (67%) rename research/object_detection/predictors/heads/{keypoint_head_test.py => keypoint_head_tf1_test.py} (94%) rename research/object_detection/predictors/heads/{mask_head_test.py => mask_head_tf1_test.py} (96%) rename research/object_detection/predictors/{mask_rcnn_box_predictor_test.py => mask_rcnn_box_predictor_tf1_test.py} (97%) rename research/object_detection/predictors/{mask_rcnn_keras_box_predictor_test.py => mask_rcnn_keras_box_predictor_tf2_test.py} (76%) rename research/object_detection/predictors/{rfcn_box_predictor_test.py => rfcn_box_predictor_tf1_test.py} (95%) rename research/object_detection/predictors/{rfcn_keras_box_predictor_test.py => rfcn_keras_box_predictor_tf2_test.py} (85%) create mode 100644 research/object_detection/protos/center_net.proto create mode 100644 research/object_detection/samples/configs/ssdlite_mobiledet_gpu_320x320_coco_sync_4x4.config create mode 100644 research/object_detection/test_images/snapshot_serengeti/README.md create mode 100644 research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0038.jpeg create mode 100644 research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0039.jpeg create mode 100644 research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0040.jpeg create mode 100644 research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0041.jpeg create mode 100644 research/object_detection/test_images/snapshot_serengeti/context_rcnn_demo_metadata.json rename research/object_detection/tpu_exporters/{export_saved_model_tpu_lib_test.py => export_saved_model_tpu_lib_tf1_test.py} (95%) rename research/object_detection/utils/{model_util_test.py => model_util_tf2_test.py} (94%) rename research/object_detection/utils/{variables_helper_test.py => variables_helper_tf1_test.py} (96%) diff --git a/research/object_detection/builders/box_predictor_builder_test.py b/research/object_detection/builders/box_predictor_builder_test.py index 72a71b794..7154cd2ef 100644 --- a/research/object_detection/builders/box_predictor_builder_test.py +++ b/research/object_detection/builders/box_predictor_builder_test.py @@ -16,6 +16,7 @@ """Tests for box_predictor_builder.""" +import unittest import mock import tensorflow.compat.v1 as tf @@ -25,8 +26,10 @@ from object_detection.builders import hyperparams_builder from object_detection.predictors import mask_rcnn_box_predictor from object_detection.protos import box_predictor_pb2 from object_detection.protos import hyperparams_pb2 +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only Tests.') class ConvolutionalBoxPredictorBuilderTest(tf.test.TestCase): def test_box_predictor_calls_conv_argscope_fn(self): @@ -161,6 +164,7 @@ class ConvolutionalBoxPredictorBuilderTest(tf.test.TestCase): self.assertFalse(class_head._use_depthwise) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only Tests.') class WeightSharedConvolutionalBoxPredictorBuilderTest(tf.test.TestCase): def test_box_predictor_calls_conv_argscope_fn(self): @@ -357,6 +361,7 @@ class WeightSharedConvolutionalBoxPredictorBuilderTest(tf.test.TestCase): +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only Tests.') class MaskRCNNBoxPredictorBuilderTest(tf.test.TestCase): def test_box_predictor_builder_calls_fc_argscope_fn(self): @@ -537,6 +542,7 @@ class MaskRCNNBoxPredictorBuilderTest(tf.test.TestCase): ._convolve_then_upsample) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only Tests.') class 
RfcnBoxPredictorBuilderTest(tf.test.TestCase): def test_box_predictor_calls_fc_argscope_fn(self): diff --git a/research/object_detection/builders/calibration_builder_test.py b/research/object_detection/builders/calibration_builder_test.py index a077ef4f9..a81d53a86 100644 --- a/research/object_detection/builders/calibration_builder_test.py +++ b/research/object_detection/builders/calibration_builder_test.py @@ -25,31 +25,34 @@ from six.moves import zip import tensorflow.compat.v1 as tf from object_detection.builders import calibration_builder from object_detection.protos import calibration_pb2 +from object_detection.utils import test_case -class CalibrationBuilderTest(tf.test.TestCase): +class CalibrationBuilderTest(test_case.TestCase): def test_tf_linear_interp1d_map(self): """Tests TF linear interpolation mapping to a single number.""" - with self.test_session() as sess: + def graph_fn(): tf_x = tf.constant([0., 0.5, 1.]) tf_y = tf.constant([0.5, 0.5, 0.5]) new_x = tf.constant([0., 0.25, 0.5, 0.75, 1.]) tf_map_outputs = calibration_builder._tf_linear_interp1d( new_x, tf_x, tf_y) - tf_map_outputs_np = sess.run([tf_map_outputs]) - self.assertAllClose(tf_map_outputs_np, [[0.5, 0.5, 0.5, 0.5, 0.5]]) + return tf_map_outputs + tf_map_outputs_np = self.execute(graph_fn, []) + self.assertAllClose(tf_map_outputs_np, [0.5, 0.5, 0.5, 0.5, 0.5]) def test_tf_linear_interp1d_interpolate(self): """Tests TF 1d linear interpolation not mapping to a single number.""" - with self.test_session() as sess: + def graph_fn(): tf_x = tf.constant([0., 0.5, 1.]) tf_y = tf.constant([0.6, 0.7, 1.0]) new_x = tf.constant([0., 0.25, 0.5, 0.75, 1.]) tf_interpolate_outputs = calibration_builder._tf_linear_interp1d( new_x, tf_x, tf_y) - tf_interpolate_outputs_np = sess.run([tf_interpolate_outputs]) - self.assertAllClose(tf_interpolate_outputs_np, [[0.6, 0.65, 0.7, 0.85, 1.]]) + return tf_interpolate_outputs + tf_interpolate_outputs_np = self.execute(graph_fn, []) + self.assertAllClose(tf_interpolate_outputs_np, [0.6, 0.65, 0.7, 0.85, 1.]) @staticmethod def _get_scipy_interp1d(new_x, x, y): @@ -59,12 +62,13 @@ class CalibrationBuilderTest(tf.test.TestCase): def _get_tf_interp1d(self, new_x, x, y): """Helper performing 1d linear interpolation using Tensorflow.""" - with self.test_session() as sess: + def graph_fn(): tf_interp_outputs = calibration_builder._tf_linear_interp1d( tf.convert_to_tensor(new_x, dtype=tf.float32), tf.convert_to_tensor(x, dtype=tf.float32), tf.convert_to_tensor(y, dtype=tf.float32)) - np_tf_interp_outputs = sess.run(tf_interp_outputs) + return tf_interp_outputs + np_tf_interp_outputs = self.execute(graph_fn, []) return np_tf_interp_outputs def test_tf_linear_interp1d_against_scipy_map(self): @@ -128,8 +132,7 @@ class CalibrationBuilderTest(tf.test.TestCase): self._add_function_approximation_to_calibration_proto( calibration_config, class_agnostic_x, class_agnostic_y, class_id=None) - od_graph = tf.Graph() - with self.test_session(graph=od_graph) as sess: + def graph_fn(): calibration_fn = calibration_builder.build(calibration_config) # batch_size = 2, num_classes = 2, num_anchors = 2. class_predictions_with_background = tf.constant( @@ -140,7 +143,8 @@ class CalibrationBuilderTest(tf.test.TestCase): # Everything should map to 0.5 if classes are ignored. 
calibrated_scores = calibration_fn(class_predictions_with_background) - calibrated_scores_np = sess.run(calibrated_scores) + return calibrated_scores + calibrated_scores_np = self.execute(graph_fn, []) self.assertAllClose(calibrated_scores_np, [[[0.05, 0.1, 0.15], [0.2, 0.25, 0.0]], [[0.35, 0.45, 0.55], @@ -161,8 +165,7 @@ class CalibrationBuilderTest(tf.test.TestCase): self._add_function_approximation_to_calibration_proto( calibration_config, class_1_x, class_1_y, class_id=1) - od_graph = tf.Graph() - with self.test_session(graph=od_graph) as sess: + def graph_fn(): calibration_fn = calibration_builder.build(calibration_config) # batch_size = 2, num_classes = 2, num_anchors = 2. class_predictions_with_background = tf.constant( @@ -170,7 +173,8 @@ class CalibrationBuilderTest(tf.test.TestCase): [[0.6, 0.4], [0.08, 0.92]]], dtype=tf.float32) calibrated_scores = calibration_fn(class_predictions_with_background) - calibrated_scores_np = sess.run(calibrated_scores) + return calibrated_scores + calibrated_scores_np = self.execute(graph_fn, []) self.assertAllClose(calibrated_scores_np, [[[0.5, 0.6], [0.5, 0.3]], [[0.5, 0.7], [0.5, 0.96]]]) @@ -179,8 +183,7 @@ class CalibrationBuilderTest(tf.test.TestCase): calibration_config = calibration_pb2.CalibrationConfig() calibration_config.temperature_scaling_calibration.scaler = 2.0 - od_graph = tf.Graph() - with self.test_session(graph=od_graph) as sess: + def graph_fn(): calibration_fn = calibration_builder.build(calibration_config) # batch_size = 2, num_classes = 2, num_anchors = 2. class_predictions_with_background = tf.constant( @@ -188,7 +191,8 @@ class CalibrationBuilderTest(tf.test.TestCase): [[0.6, 0.7, 0.8], [0.9, 1.0, 1.0]]], dtype=tf.float32) calibrated_scores = calibration_fn(class_predictions_with_background) - calibrated_scores_np = sess.run(calibrated_scores) + return calibrated_scores + calibrated_scores_np = self.execute(graph_fn, []) self.assertAllClose(calibrated_scores_np, [[[0.05, 0.1, 0.15], [0.2, 0.25, 0.0]], [[0.3, 0.35, 0.4], [0.45, 0.5, 0.5]]]) @@ -212,8 +216,7 @@ class CalibrationBuilderTest(tf.test.TestCase): calibration_config = calibration_pb2.CalibrationConfig() self._add_function_approximation_to_calibration_proto( calibration_config, class_0_x, class_0_y, class_id=0) - od_graph = tf.Graph() - with self.test_session(graph=od_graph) as sess: + def graph_fn(): calibration_fn = calibration_builder.build(calibration_config) # batch_size = 2, num_classes = 2, num_anchors = 2. 
class_predictions_with_background = tf.constant( @@ -221,7 +224,8 @@ class CalibrationBuilderTest(tf.test.TestCase): [[0.6, 0.4], [0.08, 0.92]]], dtype=tf.float32) calibrated_scores = calibration_fn(class_predictions_with_background) - calibrated_scores_np = sess.run(calibrated_scores) + return calibrated_scores + calibrated_scores_np = self.execute(graph_fn, []) self.assertAllClose(calibrated_scores_np, [[[0.5, 0.2], [0.5, 0.1]], [[0.5, 0.4], [0.5, 0.92]]]) diff --git a/research/object_detection/builders/dataset_builder.py b/research/object_detection/builders/dataset_builder.py index 772086619..c1c1ce3ec 100644 --- a/research/object_detection/builders/dataset_builder.py +++ b/research/object_detection/builders/dataset_builder.py @@ -29,7 +29,6 @@ from __future__ import print_function import functools import tensorflow.compat.v1 as tf -from tensorflow.contrib import data as tf_data from object_detection.builders import decoder_builder from object_detection.protos import input_reader_pb2 @@ -94,7 +93,7 @@ def read_dataset(file_read_func, input_files, config, filename_dataset = filename_dataset.repeat(config.num_epochs or None) records_dataset = filename_dataset.apply( - tf_data.parallel_interleave( + tf.data.experimental.parallel_interleave( file_read_func, cycle_length=num_readers, block_length=config.read_block_length, @@ -153,6 +152,30 @@ def build(input_reader_config, batch_size=None, transform_input_data_fn=None, if not config.input_path: raise ValueError('At least one input path must be specified in ' '`input_reader_config`.') + def dataset_map_fn(dataset, fn_to_map, batch_size=None, + input_reader_config=None): + """Handles whether or not to use the legacy map function. + + Args: + dataset: A tf.Dataset. + fn_to_map: The function to be mapped for that dataset. + batch_size: Batch size. If batch size is None, no batching is performed. + input_reader_config: A input_reader_pb2.InputReader object. + + Returns: + A tf.data.Dataset mapped with fn_to_map. + """ + if hasattr(dataset, 'map_with_legacy_function'): + if batch_size: + num_parallel_calls = batch_size * ( + input_reader_config.num_parallel_batches) + else: + num_parallel_calls = input_reader_config.num_parallel_map_calls + dataset = dataset.map_with_legacy_function( + fn_to_map, num_parallel_calls=num_parallel_calls) + else: + dataset = dataset.map(fn_to_map, tf.data.experimental.AUTOTUNE) + return dataset shard_fn = shard_function_for_context(input_context) if input_context is not None: batch_size = input_context.get_per_replica_batch_size(batch_size) @@ -163,15 +186,16 @@ def build(input_reader_config, batch_size=None, transform_input_data_fn=None, dataset = dataset.shard(input_reader_config.sample_1_of_n_examples, 0) # TODO(rathodv): make batch size a required argument once the old binaries # are deleted. 
- dataset = dataset.map(decoder.decode, tf.data.experimental.AUTOTUNE) + dataset = dataset_map_fn(dataset, decoder.decode, batch_size, + input_reader_config) if reduce_to_frame_fn: - dataset = reduce_to_frame_fn(dataset) + dataset = reduce_to_frame_fn(dataset, dataset_map_fn, batch_size, + input_reader_config) if transform_input_data_fn is not None: - dataset = dataset.map(transform_input_data_fn, - tf.data.experimental.AUTOTUNE) + dataset = dataset_map_fn(dataset, transform_input_data_fn, + batch_size, input_reader_config) if batch_size: - dataset = dataset.apply( - tf_data.batch_and_drop_remainder(batch_size)) + dataset = dataset.batch(batch_size, drop_remainder=True) dataset = dataset.prefetch(input_reader_config.num_prefetch_batches) return dataset diff --git a/research/object_detection/builders/dataset_builder_test.py b/research/object_detection/builders/dataset_builder_test.py index 741ff3bcf..7c3de113e 100644 --- a/research/object_detection/builders/dataset_builder_test.py +++ b/research/object_detection/builders/dataset_builder_test.py @@ -197,13 +197,13 @@ class DatasetBuilderTest(test_case.TestCase): output_dict[fields.InputDataFields.groundtruth_boxes][0][0]) def get_mock_reduce_to_frame_fn(self): - def mock_reduce_to_frame_fn(dataset): + def mock_reduce_to_frame_fn(dataset, dataset_map_fn, batch_size, config): def get_frame(tensor_dict): out_tensor_dict = {} out_tensor_dict[fields.InputDataFields.source_id] = ( tensor_dict[fields.InputDataFields.source_id][0]) return out_tensor_dict - return dataset.map(get_frame, tf.data.experimental.AUTOTUNE) + return dataset_map_fn(dataset, get_frame, batch_size, config) return mock_reduce_to_frame_fn def test_build_tf_record_input_reader_sequence_example_train(self): @@ -537,8 +537,15 @@ class ReadDatasetTest(test_case.TestCase): def graph_fn(): keys = [1, 0, -1] dataset = tf.data.Dataset.from_tensor_slices([[1, 2, -1, 5]]) - table = contrib_lookup.HashTable( - initializer=contrib_lookup.KeyValueTensorInitializer( + try: + # Dynamically try to load the tf v2 lookup, falling back to contrib + lookup = tf.compat.v2.lookup + hash_table_class = tf.compat.v2.lookup.StaticHashTable + except AttributeError: + lookup = contrib_lookup + hash_table_class = contrib_lookup.HashTable + table = hash_table_class( + initializer=lookup.KeyValueTensorInitializer( keys=keys, values=list(reversed(keys))), default_value=100) dataset = dataset.map(table.lookup) @@ -559,7 +566,7 @@ class ReadDatasetTest(test_case.TestCase): data = self.execute(graph_fn, []) # Note that the execute function extracts single outputs if the return # value is of size 1. - self.assertAllEqual( + self.assertCountEqual( data, [ 1, 10, 2, 20, 3, 30, 4, 40, 5, 50, 1, 10, 2, 20, 3, 30, 4, 40, 5, 50 @@ -577,7 +584,7 @@ class ReadDatasetTest(test_case.TestCase): data = self.execute(graph_fn, []) # Note that the execute function extracts single outputs if the return # value is of size 1. - self.assertAllEqual( + self.assertCountEqual( data, [ 1, 10, 2, 20, 3, 30, 4, 40, 5, 50, 1, 10, 2, 20, 3, 30, 4, 40, 5, 50 @@ -607,12 +614,14 @@ class ReadDatasetTest(test_case.TestCase): def graph_fn(): return self._get_dataset_next( [self._shuffle_path_template % '*'], config, batch_size=10) - expected_non_shuffle_output = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1] + expected_non_shuffle_output1 = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1] + expected_non_shuffle_output2 = [1, 1, 1, 1, 1, 0, 0, 0, 0, 0] # Note that the execute function extracts single outputs if the return # value is of size 1. 
data = self.execute(graph_fn, []) - self.assertAllEqual(data, expected_non_shuffle_output) + self.assertTrue(all(data == expected_non_shuffle_output1) or + all(data == expected_non_shuffle_output2)) def test_read_dataset_single_epoch(self): config = input_reader_pb2.InputReader() diff --git a/research/object_detection/builders/decoder_builder.py b/research/object_detection/builders/decoder_builder.py index d3cac57d0..59880735c 100644 --- a/research/object_detection/builders/decoder_builder.py +++ b/research/object_detection/builders/decoder_builder.py @@ -48,7 +48,7 @@ def build(input_reader_config): if input_reader_config.HasField('label_map_path'): label_map_proto_file = input_reader_config.label_map_path input_type = input_reader_config.input_type - if input_type == input_reader_pb2.InputType.TF_EXAMPLE: + if input_type == input_reader_pb2.InputType.Value('TF_EXAMPLE'): decoder = tf_example_decoder.TfExampleDecoder( load_instance_masks=input_reader_config.load_instance_masks, load_multiclass_scores=input_reader_config.load_multiclass_scores, @@ -60,7 +60,7 @@ def build(input_reader_config): num_keypoints=input_reader_config.num_keypoints, expand_hierarchy_labels=input_reader_config.expand_labels_hierarchy) return decoder - elif input_type == input_reader_pb2.InputType.TF_SEQUENCE_EXAMPLE: + elif input_type == input_reader_pb2.InputType.Value('TF_SEQUENCE_EXAMPLE'): decoder = tf_sequence_example_decoder.TfSequenceExampleDecoder( label_map_proto_file=label_map_proto_file, load_context_features=input_reader_config.load_context_features) diff --git a/research/object_detection/builders/decoder_builder_test.py b/research/object_detection/builders/decoder_builder_test.py index 767c108e9..d45285fd1 100644 --- a/research/object_detection/builders/decoder_builder_test.py +++ b/research/object_detection/builders/decoder_builder_test.py @@ -29,6 +29,7 @@ from object_detection.core import standard_fields as fields from object_detection.dataset_tools import seq_example_util from object_detection.protos import input_reader_pb2 from object_detection.utils import dataset_util +from object_detection.utils import test_case def _get_labelmap_path(): @@ -38,17 +39,20 @@ def _get_labelmap_path(): 'pet_label_map.pbtxt') -class DecoderBuilderTest(tf.test.TestCase): +class DecoderBuilderTest(test_case.TestCase): def _make_serialized_tf_example(self, has_additional_channels=False): - image_tensor = np.random.randint(255, size=(4, 5, 3)).astype(np.uint8) - additional_channels_tensor = np.random.randint( + image_tensor_np = np.random.randint(255, size=(4, 5, 3)).astype(np.uint8) + additional_channels_tensor_np = np.random.randint( 255, size=(4, 5, 1)).astype(np.uint8) flat_mask = (4 * 5) * [1.0] - with self.test_session(): - encoded_jpeg = tf.image.encode_jpeg(tf.constant(image_tensor)).eval() - encoded_additional_channels_jpeg = tf.image.encode_jpeg( - tf.constant(additional_channels_tensor)).eval() + def graph_fn(image_tensor): + encoded_jpeg = tf.image.encode_jpeg(image_tensor) + return encoded_jpeg + encoded_jpeg = self.execute_cpu(graph_fn, [image_tensor_np]) + encoded_additional_channels_jpeg = self.execute_cpu( + graph_fn, [additional_channels_tensor_np]) + features = { 'image/source_id': dataset_util.bytes_feature('0'.encode()), 'image/encoded': dataset_util.bytes_feature(encoded_jpeg), @@ -71,46 +75,45 @@ class DecoderBuilderTest(tf.test.TestCase): def _make_random_serialized_jpeg_images(self, num_frames, image_height, image_width): - images = tf.cast(tf.random.uniform( - [num_frames, image_height, 
image_width, 3], - maxval=256, - dtype=tf.int32), dtype=tf.uint8) - images_list = tf.unstack(images, axis=0) - encoded_images_list = [tf.io.encode_jpeg(image) for image in images_list] - with tf.Session() as sess: - encoded_images = sess.run(encoded_images_list) - return encoded_images + def graph_fn(): + images = tf.cast(tf.random.uniform( + [num_frames, image_height, image_width, 3], + maxval=256, + dtype=tf.int32), dtype=tf.uint8) + images_list = tf.unstack(images, axis=0) + encoded_images = [tf.io.encode_jpeg(image) for image in images_list] + return encoded_images + return self.execute_cpu(graph_fn, []) def _make_serialized_tf_sequence_example(self): num_frames = 4 image_height = 20 image_width = 30 image_source_ids = [str(i) for i in range(num_frames)] - with self.test_session(): - encoded_images = self._make_random_serialized_jpeg_images( - num_frames, image_height, image_width) - sequence_example_serialized = seq_example_util.make_sequence_example( - dataset_name='video_dataset', - video_id='video', - encoded_images=encoded_images, - image_height=image_height, - image_width=image_width, - image_source_ids=image_source_ids, - image_format='JPEG', - is_annotated=[[1], [1], [1], [1]], - bboxes=[ - [[]], # Frame 0. - [[0., 0., 1., 1.]], # Frame 1. - [[0., 0., 1., 1.], - [0.1, 0.1, 0.2, 0.2]], # Frame 2. - [[]], # Frame 3. - ], - label_strings=[ - [], # Frame 0. - ['Abyssinian'], # Frame 1. - ['Abyssinian', 'american_bulldog'], # Frame 2. - [], # Frame 3 - ]).SerializeToString() + encoded_images = self._make_random_serialized_jpeg_images( + num_frames, image_height, image_width) + sequence_example_serialized = seq_example_util.make_sequence_example( + dataset_name='video_dataset', + video_id='video', + encoded_images=encoded_images, + image_height=image_height, + image_width=image_width, + image_source_ids=image_source_ids, + image_format='JPEG', + is_annotated=[[1], [1], [1], [1]], + bboxes=[ + [[]], # Frame 0. + [[0., 0., 1., 1.]], # Frame 1. + [[0., 0., 1., 1.], + [0.1, 0.1, 0.2, 0.2]], # Frame 2. + [[]], # Frame 3. + ], + label_strings=[ + [], # Frame 0. + ['Abyssinian'], # Frame 1. + ['Abyssinian', 'american_bulldog'], # Frame 2. 
+ [], # Frame 3 + ]).SerializeToString() return sequence_example_serialized def test_build_tf_record_input_reader(self): @@ -119,21 +122,19 @@ class DecoderBuilderTest(tf.test.TestCase): text_format.Parse(input_reader_text_proto, input_reader_proto) decoder = decoder_builder.build(input_reader_proto) - tensor_dict = decoder.decode(self._make_serialized_tf_example()) - - with tf.train.MonitoredSession() as sess: - output_dict = sess.run(tensor_dict) - - self.assertNotIn( - fields.InputDataFields.groundtruth_instance_masks, output_dict) - self.assertEqual((4, 5, 3), output_dict[fields.InputDataFields.image].shape) - self.assertAllEqual([2], - output_dict[fields.InputDataFields.groundtruth_classes]) - self.assertEqual( - (1, 4), output_dict[fields.InputDataFields.groundtruth_boxes].shape) - self.assertAllEqual( - [0.0, 0.0, 1.0, 1.0], - output_dict[fields.InputDataFields.groundtruth_boxes][0]) + serialized_seq_example = self._make_serialized_tf_example() + def graph_fn(): + tensor_dict = decoder.decode(serialized_seq_example) + return (tensor_dict[fields.InputDataFields.image], + tensor_dict[fields.InputDataFields.groundtruth_classes], + tensor_dict[fields.InputDataFields.groundtruth_boxes]) + + (image, groundtruth_classes, + groundtruth_boxes) = self.execute_cpu(graph_fn, []) + self.assertEqual((4, 5, 3), image.shape) + self.assertAllEqual([2], groundtruth_classes) + self.assertEqual((1, 4), groundtruth_boxes.shape) + self.assertAllEqual([0.0, 0.0, 1.0, 1.0], groundtruth_boxes[0]) def test_build_tf_record_input_reader_sequence_example(self): label_map_path = _get_labelmap_path() @@ -145,12 +146,16 @@ class DecoderBuilderTest(tf.test.TestCase): input_reader_proto.label_map_path = label_map_path text_format.Parse(input_reader_text_proto, input_reader_proto) - decoder = decoder_builder.build(input_reader_proto) - tensor_dict = decoder.decode(self._make_serialized_tf_sequence_example()) - - with tf.train.MonitoredSession() as sess: - output_dict = sess.run(tensor_dict) - + serialized_seq_example = self._make_serialized_tf_sequence_example() + def graph_fn(): + decoder = decoder_builder.build(input_reader_proto) + tensor_dict = decoder.decode(serialized_seq_example) + return (tensor_dict[fields.InputDataFields.image], + tensor_dict[fields.InputDataFields.groundtruth_classes], + tensor_dict[fields.InputDataFields.groundtruth_boxes], + tensor_dict[fields.InputDataFields.num_groundtruth_boxes]) + (actual_image, actual_groundtruth_classes, actual_groundtruth_boxes, + actual_num_groundtruth_boxes) = self.execute_cpu(graph_fn, []) expected_groundtruth_classes = [[-1, -1], [1, -1], [1, 2], [-1, -1]] expected_groundtruth_boxes = [[[0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0]], [[0.0, 0.0, 1.0, 1.0], [0.0, 0.0, 0.0, 0.0]], @@ -158,19 +163,14 @@ class DecoderBuilderTest(tf.test.TestCase): [[0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0]]] expected_num_groundtruth_boxes = [0, 1, 2, 0] - self.assertNotIn( - fields.InputDataFields.groundtruth_instance_masks, output_dict) # Sequence example images are encoded. 
- self.assertEqual((4,), output_dict[fields.InputDataFields.image].shape) + self.assertEqual((4,), actual_image.shape) self.assertAllEqual(expected_groundtruth_classes, - output_dict[fields.InputDataFields.groundtruth_classes]) - self.assertEqual( - (4, 2, 4), output_dict[fields.InputDataFields.groundtruth_boxes].shape) + actual_groundtruth_classes) self.assertAllClose(expected_groundtruth_boxes, - output_dict[fields.InputDataFields.groundtruth_boxes]) + actual_groundtruth_boxes) self.assertAllClose( - expected_num_groundtruth_boxes, - output_dict[fields.InputDataFields.num_groundtruth_boxes]) + expected_num_groundtruth_boxes, actual_num_groundtruth_boxes) def test_build_tf_record_input_reader_and_load_instance_masks(self): input_reader_text_proto = """ @@ -181,14 +181,12 @@ class DecoderBuilderTest(tf.test.TestCase): text_format.Parse(input_reader_text_proto, input_reader_proto) decoder = decoder_builder.build(input_reader_proto) - tensor_dict = decoder.decode(self._make_serialized_tf_example()) - - with tf.train.MonitoredSession() as sess: - output_dict = sess.run(tensor_dict) - - self.assertAllEqual( - (1, 4, 5), - output_dict[fields.InputDataFields.groundtruth_instance_masks].shape) + serialized_seq_example = self._make_serialized_tf_example() + def graph_fn(): + tensor_dict = decoder.decode(serialized_seq_example) + return tensor_dict[fields.InputDataFields.groundtruth_instance_masks] + masks = self.execute_cpu(graph_fn, []) + self.assertAllEqual((1, 4, 5), masks.shape) if __name__ == '__main__': diff --git a/research/object_detection/builders/graph_rewriter_builder_test.py b/research/object_detection/builders/graph_rewriter_builder_tf1_test.py similarity index 91% rename from research/object_detection/builders/graph_rewriter_builder_test.py rename to research/object_detection/builders/graph_rewriter_builder_tf1_test.py index 02692ce91..8af8fe962 100644 --- a/research/object_detection/builders/graph_rewriter_builder_test.py +++ b/research/object_detection/builders/graph_rewriter_builder_tf1_test.py @@ -13,22 +13,21 @@ # limitations under the License. # ============================================================================== """Tests for graph_rewriter_builder.""" +import unittest import mock import tensorflow.compat.v1 as tf import tf_slim as slim from object_detection.builders import graph_rewriter_builder from object_detection.protos import graph_rewriter_pb2 +from object_detection.utils import tf_version -# pylint: disable=g-import-not-at-top -try: - from tensorflow.contrib import quantize as contrib_quantize -except ImportError: - # TF 2.0 doesn't ship with contrib. 
- pass -# pylint: enable=g-import-not-at-top +if tf_version.is_tf1(): + from tensorflow.contrib import quantize as contrib_quantize # pylint: disable=g-import-not-at-top + +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class QuantizationBuilderTest(tf.test.TestCase): def testQuantizationBuilderSetsUpCorrectTrainArguments(self): diff --git a/research/object_detection/builders/hyperparams_builder_test.py b/research/object_detection/builders/hyperparams_builder_test.py index 0f92f7d75..2c6fcd5af 100644 --- a/research/object_detection/builders/hyperparams_builder_test.py +++ b/research/object_detection/builders/hyperparams_builder_test.py @@ -16,6 +16,7 @@ """Tests object_detection.core.hyperparams_builder.""" +import unittest import numpy as np import tensorflow.compat.v1 as tf import tf_slim as slim @@ -24,12 +25,14 @@ from google.protobuf import text_format from object_detection.builders import hyperparams_builder from object_detection.core import freezable_batch_norm from object_detection.protos import hyperparams_pb2 +from object_detection.utils import tf_version def _get_scope_key(op): return getattr(op, '_key_op', str(op)) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only tests.') class HyperparamsBuilderTest(tf.test.TestCase): def test_default_arg_scope_has_conv2d_op(self): @@ -149,29 +152,6 @@ class HyperparamsBuilderTest(tf.test.TestCase): result = sess.run(regularizer(tf.constant(weights))) self.assertAllClose(np.abs(weights).sum() * 0.5, result) - def test_return_l1_regularized_weights_keras(self): - conv_hyperparams_text_proto = """ - regularizer { - l1_regularizer { - weight: 0.5 - } - } - initializer { - truncated_normal_initializer { - } - } - """ - conv_hyperparams_proto = hyperparams_pb2.Hyperparams() - text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) - keras_config = hyperparams_builder.KerasLayerHyperparams( - conv_hyperparams_proto) - - regularizer = keras_config.params()['kernel_regularizer'] - weights = np.array([1., -1, 4., 2.]) - with self.test_session() as sess: - result = sess.run(regularizer(tf.constant(weights))) - self.assertAllClose(np.abs(weights).sum() * 0.5, result) - def test_return_l2_regularizer_weights(self): conv_hyperparams_text_proto = """ regularizer { @@ -197,29 +177,6 @@ class HyperparamsBuilderTest(tf.test.TestCase): result = sess.run(regularizer(tf.constant(weights))) self.assertAllClose(np.power(weights, 2).sum() / 2.0 * 0.42, result) - def test_return_l2_regularizer_weights_keras(self): - conv_hyperparams_text_proto = """ - regularizer { - l2_regularizer { - weight: 0.42 - } - } - initializer { - truncated_normal_initializer { - } - } - """ - conv_hyperparams_proto = hyperparams_pb2.Hyperparams() - text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) - keras_config = hyperparams_builder.KerasLayerHyperparams( - conv_hyperparams_proto) - - regularizer = keras_config.params()['kernel_regularizer'] - weights = np.array([1., -1, 4., 2.]) - with self.test_session() as sess: - result = sess.run(regularizer(tf.constant(weights))) - self.assertAllClose(np.power(weights, 2).sum() / 2.0 * 0.42, result) - def test_return_non_default_batch_norm_params_with_train_during_train(self): conv_hyperparams_text_proto = """ regularizer { @@ -252,70 +209,6 @@ class HyperparamsBuilderTest(tf.test.TestCase): self.assertTrue(batch_norm_params['scale']) self.assertTrue(batch_norm_params['is_training']) - def test_return_non_default_batch_norm_params_keras( - self): - conv_hyperparams_text_proto = """ - 
regularizer { - l2_regularizer { - } - } - initializer { - truncated_normal_initializer { - } - } - batch_norm { - decay: 0.7 - center: false - scale: true - epsilon: 0.03 - } - """ - conv_hyperparams_proto = hyperparams_pb2.Hyperparams() - text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) - keras_config = hyperparams_builder.KerasLayerHyperparams( - conv_hyperparams_proto) - - self.assertTrue(keras_config.use_batch_norm()) - batch_norm_params = keras_config.batch_norm_params() - self.assertAlmostEqual(batch_norm_params['momentum'], 0.7) - self.assertAlmostEqual(batch_norm_params['epsilon'], 0.03) - self.assertFalse(batch_norm_params['center']) - self.assertTrue(batch_norm_params['scale']) - - batch_norm_layer = keras_config.build_batch_norm() - self.assertIsInstance(batch_norm_layer, - freezable_batch_norm.FreezableBatchNorm) - - def test_return_non_default_batch_norm_params_keras_override( - self): - conv_hyperparams_text_proto = """ - regularizer { - l2_regularizer { - } - } - initializer { - truncated_normal_initializer { - } - } - batch_norm { - decay: 0.7 - center: false - scale: true - epsilon: 0.03 - } - """ - conv_hyperparams_proto = hyperparams_pb2.Hyperparams() - text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) - keras_config = hyperparams_builder.KerasLayerHyperparams( - conv_hyperparams_proto) - - self.assertTrue(keras_config.use_batch_norm()) - batch_norm_params = keras_config.batch_norm_params(momentum=0.4) - self.assertAlmostEqual(batch_norm_params['momentum'], 0.4) - self.assertAlmostEqual(batch_norm_params['epsilon'], 0.03) - self.assertFalse(batch_norm_params['center']) - self.assertTrue(batch_norm_params['scale']) - def test_return_batch_norm_params_with_notrain_during_eval(self): conv_hyperparams_text_proto = """ regularizer { @@ -399,29 +292,6 @@ class HyperparamsBuilderTest(tf.test.TestCase): conv_scope_arguments = scope[_get_scope_key(slim.conv2d)] self.assertEqual(conv_scope_arguments['normalizer_fn'], None) - def test_do_not_use_batch_norm_if_default_keras(self): - conv_hyperparams_text_proto = """ - regularizer { - l2_regularizer { - } - } - initializer { - truncated_normal_initializer { - } - } - """ - conv_hyperparams_proto = hyperparams_pb2.Hyperparams() - text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) - keras_config = hyperparams_builder.KerasLayerHyperparams( - conv_hyperparams_proto) - self.assertFalse(keras_config.use_batch_norm()) - self.assertEqual(keras_config.batch_norm_params(), {}) - - # The batch norm builder should build an identity Lambda layer - identity_layer = keras_config.build_batch_norm() - self.assertIsInstance(identity_layer, - tf.keras.layers.Lambda) - def test_use_none_activation(self): conv_hyperparams_text_proto = """ regularizer { @@ -442,29 +312,6 @@ class HyperparamsBuilderTest(tf.test.TestCase): conv_scope_arguments = scope[_get_scope_key(slim.conv2d)] self.assertEqual(conv_scope_arguments['activation_fn'], None) - def test_use_none_activation_keras(self): - conv_hyperparams_text_proto = """ - regularizer { - l2_regularizer { - } - } - initializer { - truncated_normal_initializer { - } - } - activation: NONE - """ - conv_hyperparams_proto = hyperparams_pb2.Hyperparams() - text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) - keras_config = hyperparams_builder.KerasLayerHyperparams( - conv_hyperparams_proto) - self.assertEqual(keras_config.params()['activation'], None) - self.assertEqual( - 
keras_config.params(include_activation=True)['activation'], None) - activation_layer = keras_config.build_activation_layer() - self.assertIsInstance(activation_layer, tf.keras.layers.Lambda) - self.assertEqual(activation_layer.function, tf.identity) - def test_use_relu_activation(self): conv_hyperparams_text_proto = """ regularizer { @@ -485,29 +332,6 @@ class HyperparamsBuilderTest(tf.test.TestCase): conv_scope_arguments = scope[_get_scope_key(slim.conv2d)] self.assertEqual(conv_scope_arguments['activation_fn'], tf.nn.relu) - def test_use_relu_activation_keras(self): - conv_hyperparams_text_proto = """ - regularizer { - l2_regularizer { - } - } - initializer { - truncated_normal_initializer { - } - } - activation: RELU - """ - conv_hyperparams_proto = hyperparams_pb2.Hyperparams() - text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) - keras_config = hyperparams_builder.KerasLayerHyperparams( - conv_hyperparams_proto) - self.assertEqual(keras_config.params()['activation'], None) - self.assertEqual( - keras_config.params(include_activation=True)['activation'], tf.nn.relu) - activation_layer = keras_config.build_activation_layer() - self.assertIsInstance(activation_layer, tf.keras.layers.Lambda) - self.assertEqual(activation_layer.function, tf.nn.relu) - def test_use_relu_6_activation(self): conv_hyperparams_text_proto = """ regularizer { @@ -528,29 +352,6 @@ class HyperparamsBuilderTest(tf.test.TestCase): conv_scope_arguments = scope[_get_scope_key(slim.conv2d)] self.assertEqual(conv_scope_arguments['activation_fn'], tf.nn.relu6) - def test_use_relu_6_activation_keras(self): - conv_hyperparams_text_proto = """ - regularizer { - l2_regularizer { - } - } - initializer { - truncated_normal_initializer { - } - } - activation: RELU_6 - """ - conv_hyperparams_proto = hyperparams_pb2.Hyperparams() - text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) - keras_config = hyperparams_builder.KerasLayerHyperparams( - conv_hyperparams_proto) - self.assertEqual(keras_config.params()['activation'], None) - self.assertEqual( - keras_config.params(include_activation=True)['activation'], tf.nn.relu6) - activation_layer = keras_config.build_activation_layer() - self.assertIsInstance(activation_layer, tf.keras.layers.Lambda) - self.assertEqual(activation_layer.function, tf.nn.relu6) - def test_use_swish_activation(self): conv_hyperparams_text_proto = """ regularizer { @@ -571,48 +372,6 @@ class HyperparamsBuilderTest(tf.test.TestCase): conv_scope_arguments = scope[_get_scope_key(slim.conv2d)] self.assertEqual(conv_scope_arguments['activation_fn'], tf.nn.swish) - def test_use_swish_activation_keras(self): - conv_hyperparams_text_proto = """ - regularizer { - l2_regularizer { - } - } - initializer { - truncated_normal_initializer { - } - } - activation: SWISH - """ - conv_hyperparams_proto = hyperparams_pb2.Hyperparams() - text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) - keras_config = hyperparams_builder.KerasLayerHyperparams( - conv_hyperparams_proto) - self.assertEqual(keras_config.params()['activation'], None) - self.assertEqual( - keras_config.params(include_activation=True)['activation'], tf.nn.swish) - activation_layer = keras_config.build_activation_layer() - self.assertIsInstance(activation_layer, tf.keras.layers.Lambda) - self.assertEqual(activation_layer.function, tf.nn.swish) - - def test_override_activation_keras(self): - conv_hyperparams_text_proto = """ - regularizer { - l2_regularizer { - } - } - initializer { - 
truncated_normal_initializer { - } - } - activation: RELU_6 - """ - conv_hyperparams_proto = hyperparams_pb2.Hyperparams() - text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) - keras_config = hyperparams_builder.KerasLayerHyperparams( - conv_hyperparams_proto) - new_params = keras_config.params(activation=tf.nn.relu) - self.assertEqual(new_params['activation'], tf.nn.relu) - def _assert_variance_in_range(self, initializer, shape, variance, tol=1e-2): with tf.Graph().as_default() as g: @@ -650,8 +409,7 @@ class HyperparamsBuilderTest(tf.test.TestCase): self._assert_variance_in_range(initializer, shape=[100, 40], variance=2. / 100.) - def test_variance_in_range_with_variance_scaling_initializer_fan_in_keras( - self): + def test_variance_in_range_with_variance_scaling_initializer_fan_out(self): conv_hyperparams_text_proto = """ regularizer { l2_regularizer { @@ -660,20 +418,22 @@ class HyperparamsBuilderTest(tf.test.TestCase): initializer { variance_scaling_initializer { factor: 2.0 - mode: FAN_IN + mode: FAN_OUT uniform: false } } """ conv_hyperparams_proto = hyperparams_pb2.Hyperparams() text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) - keras_config = hyperparams_builder.KerasLayerHyperparams( - conv_hyperparams_proto) - initializer = keras_config.params()['kernel_initializer'] + scope_fn = hyperparams_builder.build(conv_hyperparams_proto, + is_training=True) + scope = scope_fn() + conv_scope_arguments = scope[_get_scope_key(slim.conv2d)] + initializer = conv_scope_arguments['weights_initializer'] self._assert_variance_in_range(initializer, shape=[100, 40], - variance=2. / 100.) + variance=2. / 40.) - def test_variance_in_range_with_variance_scaling_initializer_fan_out(self): + def test_variance_in_range_with_variance_scaling_initializer_fan_avg(self): conv_hyperparams_text_proto = """ regularizer { l2_regularizer { @@ -682,7 +442,7 @@ class HyperparamsBuilderTest(tf.test.TestCase): initializer { variance_scaling_initializer { factor: 2.0 - mode: FAN_OUT + mode: FAN_AVG uniform: false } } @@ -695,10 +455,9 @@ class HyperparamsBuilderTest(tf.test.TestCase): conv_scope_arguments = scope[_get_scope_key(slim.conv2d)] initializer = conv_scope_arguments['weights_initializer'] self._assert_variance_in_range(initializer, shape=[100, 40], - variance=2. / 40.) + variance=4. / (100. + 40.)) - def test_variance_in_range_with_variance_scaling_initializer_fan_out_keras( - self): + def test_variance_in_range_with_variance_scaling_initializer_uniform(self): conv_hyperparams_text_proto = """ regularizer { l2_regularizer { @@ -707,30 +466,31 @@ class HyperparamsBuilderTest(tf.test.TestCase): initializer { variance_scaling_initializer { factor: 2.0 - mode: FAN_OUT - uniform: false + mode: FAN_IN + uniform: true } } """ conv_hyperparams_proto = hyperparams_pb2.Hyperparams() text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) - keras_config = hyperparams_builder.KerasLayerHyperparams( - conv_hyperparams_proto) - initializer = keras_config.params()['kernel_initializer'] + scope_fn = hyperparams_builder.build(conv_hyperparams_proto, + is_training=True) + scope = scope_fn() + conv_scope_arguments = scope[_get_scope_key(slim.conv2d)] + initializer = conv_scope_arguments['weights_initializer'] self._assert_variance_in_range(initializer, shape=[100, 40], - variance=2. / 40.) + variance=2. / 100.) 
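# Note (illustrative aside, not part of the upstream change): the expected
# variances asserted in these variance_scaling_initializer tests follow from
# the initializer definition, variance = factor / n, evaluated for a [100, 40]
# weight shape (fan_in = 100, fan_out = 40) with factor = 2.0:
#   FAN_IN  -> 2.0 / 100            = 0.02
#   FAN_OUT -> 2.0 / 40             = 0.05
#   FAN_AVG -> 2.0 / ((100 + 40)/2) = 4.0 / 140 ~= 0.0286
# The uniform=true variant targets the same variance; only the sampling
# distribution changes.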
- def test_variance_in_range_with_variance_scaling_initializer_fan_avg(self): + def test_variance_in_range_with_truncated_normal_initializer(self): conv_hyperparams_text_proto = """ regularizer { l2_regularizer { } } initializer { - variance_scaling_initializer { - factor: 2.0 - mode: FAN_AVG - uniform: false + truncated_normal_initializer { + mean: 0.0 + stddev: 0.8 } } """ @@ -742,9 +502,85 @@ class HyperparamsBuilderTest(tf.test.TestCase): conv_scope_arguments = scope[_get_scope_key(slim.conv2d)] initializer = conv_scope_arguments['weights_initializer'] self._assert_variance_in_range(initializer, shape=[100, 40], - variance=4. / (100. + 40.)) + variance=0.49, tol=1e-1) - def test_variance_in_range_with_variance_scaling_initializer_fan_avg_keras( + def test_variance_in_range_with_random_normal_initializer(self): + conv_hyperparams_text_proto = """ + regularizer { + l2_regularizer { + } + } + initializer { + random_normal_initializer { + mean: 0.0 + stddev: 0.8 + } + } + """ + conv_hyperparams_proto = hyperparams_pb2.Hyperparams() + text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) + scope_fn = hyperparams_builder.build(conv_hyperparams_proto, + is_training=True) + scope = scope_fn() + conv_scope_arguments = scope[_get_scope_key(slim.conv2d)] + initializer = conv_scope_arguments['weights_initializer'] + self._assert_variance_in_range(initializer, shape=[100, 40], + variance=0.64, tol=1e-1) + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only tests.') +class KerasHyperparamsBuilderTest(tf.test.TestCase): + + def _assert_variance_in_range(self, initializer, shape, variance, + tol=1e-2): + var = tf.Variable(initializer(shape=shape, dtype=tf.float32)) + self.assertAllClose(np.var(var.numpy()), variance, tol, tol) + + def test_return_l1_regularized_weights_keras(self): + conv_hyperparams_text_proto = """ + regularizer { + l1_regularizer { + weight: 0.5 + } + } + initializer { + truncated_normal_initializer { + } + } + """ + conv_hyperparams_proto = hyperparams_pb2.Hyperparams() + text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) + keras_config = hyperparams_builder.KerasLayerHyperparams( + conv_hyperparams_proto) + + regularizer = keras_config.params()['kernel_regularizer'] + weights = np.array([1., -1, 4., 2.]) + result = regularizer(tf.constant(weights)).numpy() + self.assertAllClose(np.abs(weights).sum() * 0.5, result) + + def test_return_l2_regularizer_weights_keras(self): + conv_hyperparams_text_proto = """ + regularizer { + l2_regularizer { + weight: 0.42 + } + } + initializer { + truncated_normal_initializer { + } + } + """ + conv_hyperparams_proto = hyperparams_pb2.Hyperparams() + text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) + keras_config = hyperparams_builder.KerasLayerHyperparams( + conv_hyperparams_proto) + + regularizer = keras_config.params()['kernel_regularizer'] + weights = np.array([1., -1, 4., 2.]) + result = regularizer(tf.constant(weights)).numpy() + self.assertAllClose(np.power(weights, 2).sum() / 2.0 * 0.42, result) + + def test_return_non_default_batch_norm_params_keras( self): conv_hyperparams_text_proto = """ regularizer { @@ -752,22 +588,198 @@ class HyperparamsBuilderTest(tf.test.TestCase): } } initializer { - variance_scaling_initializer { - factor: 2.0 - mode: FAN_AVG - uniform: false + truncated_normal_initializer { } } + batch_norm { + decay: 0.7 + center: false + scale: true + epsilon: 0.03 + } """ conv_hyperparams_proto = hyperparams_pb2.Hyperparams() 
text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) keras_config = hyperparams_builder.KerasLayerHyperparams( conv_hyperparams_proto) - initializer = keras_config.params()['kernel_initializer'] - self._assert_variance_in_range(initializer, shape=[100, 40], - variance=4. / (100. + 40.)) - def test_variance_in_range_with_variance_scaling_initializer_uniform(self): + self.assertTrue(keras_config.use_batch_norm()) + batch_norm_params = keras_config.batch_norm_params() + self.assertAlmostEqual(batch_norm_params['momentum'], 0.7) + self.assertAlmostEqual(batch_norm_params['epsilon'], 0.03) + self.assertFalse(batch_norm_params['center']) + self.assertTrue(batch_norm_params['scale']) + + batch_norm_layer = keras_config.build_batch_norm() + self.assertIsInstance(batch_norm_layer, + freezable_batch_norm.FreezableBatchNorm) + + def test_return_non_default_batch_norm_params_keras_override( + self): + conv_hyperparams_text_proto = """ + regularizer { + l2_regularizer { + } + } + initializer { + truncated_normal_initializer { + } + } + batch_norm { + decay: 0.7 + center: false + scale: true + epsilon: 0.03 + } + """ + conv_hyperparams_proto = hyperparams_pb2.Hyperparams() + text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) + keras_config = hyperparams_builder.KerasLayerHyperparams( + conv_hyperparams_proto) + + self.assertTrue(keras_config.use_batch_norm()) + batch_norm_params = keras_config.batch_norm_params(momentum=0.4) + self.assertAlmostEqual(batch_norm_params['momentum'], 0.4) + self.assertAlmostEqual(batch_norm_params['epsilon'], 0.03) + self.assertFalse(batch_norm_params['center']) + self.assertTrue(batch_norm_params['scale']) + + def test_do_not_use_batch_norm_if_default_keras(self): + conv_hyperparams_text_proto = """ + regularizer { + l2_regularizer { + } + } + initializer { + truncated_normal_initializer { + } + } + """ + conv_hyperparams_proto = hyperparams_pb2.Hyperparams() + text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) + keras_config = hyperparams_builder.KerasLayerHyperparams( + conv_hyperparams_proto) + self.assertFalse(keras_config.use_batch_norm()) + self.assertEqual(keras_config.batch_norm_params(), {}) + + # The batch norm builder should build an identity Lambda layer + identity_layer = keras_config.build_batch_norm() + self.assertIsInstance(identity_layer, + tf.keras.layers.Lambda) + + def test_use_none_activation_keras(self): + conv_hyperparams_text_proto = """ + regularizer { + l2_regularizer { + } + } + initializer { + truncated_normal_initializer { + } + } + activation: NONE + """ + conv_hyperparams_proto = hyperparams_pb2.Hyperparams() + text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) + keras_config = hyperparams_builder.KerasLayerHyperparams( + conv_hyperparams_proto) + self.assertIsNone(keras_config.params()['activation']) + self.assertIsNone( + keras_config.params(include_activation=True)['activation']) + activation_layer = keras_config.build_activation_layer() + self.assertIsInstance(activation_layer, tf.keras.layers.Lambda) + self.assertEqual(activation_layer.function, tf.identity) + + def test_use_relu_activation_keras(self): + conv_hyperparams_text_proto = """ + regularizer { + l2_regularizer { + } + } + initializer { + truncated_normal_initializer { + } + } + activation: RELU + """ + conv_hyperparams_proto = hyperparams_pb2.Hyperparams() + text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) + keras_config = hyperparams_builder.KerasLayerHyperparams( + 
conv_hyperparams_proto) + self.assertIsNone(keras_config.params()['activation']) + self.assertEqual( + keras_config.params(include_activation=True)['activation'], tf.nn.relu) + activation_layer = keras_config.build_activation_layer() + self.assertIsInstance(activation_layer, tf.keras.layers.Lambda) + self.assertEqual(activation_layer.function, tf.nn.relu) + + def test_use_relu_6_activation_keras(self): + conv_hyperparams_text_proto = """ + regularizer { + l2_regularizer { + } + } + initializer { + truncated_normal_initializer { + } + } + activation: RELU_6 + """ + conv_hyperparams_proto = hyperparams_pb2.Hyperparams() + text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) + keras_config = hyperparams_builder.KerasLayerHyperparams( + conv_hyperparams_proto) + self.assertIsNone(keras_config.params()['activation']) + self.assertEqual( + keras_config.params(include_activation=True)['activation'], tf.nn.relu6) + activation_layer = keras_config.build_activation_layer() + self.assertIsInstance(activation_layer, tf.keras.layers.Lambda) + self.assertEqual(activation_layer.function, tf.nn.relu6) + + def test_use_swish_activation_keras(self): + conv_hyperparams_text_proto = """ + regularizer { + l2_regularizer { + } + } + initializer { + truncated_normal_initializer { + } + } + activation: SWISH + """ + conv_hyperparams_proto = hyperparams_pb2.Hyperparams() + text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) + keras_config = hyperparams_builder.KerasLayerHyperparams( + conv_hyperparams_proto) + self.assertIsNone(keras_config.params()['activation']) + self.assertEqual( + keras_config.params(include_activation=True)['activation'], tf.nn.swish) + activation_layer = keras_config.build_activation_layer() + self.assertIsInstance(activation_layer, tf.keras.layers.Lambda) + self.assertEqual(activation_layer.function, tf.nn.swish) + + def test_override_activation_keras(self): + conv_hyperparams_text_proto = """ + regularizer { + l2_regularizer { + } + } + initializer { + truncated_normal_initializer { + } + } + activation: RELU_6 + """ + conv_hyperparams_proto = hyperparams_pb2.Hyperparams() + text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) + keras_config = hyperparams_builder.KerasLayerHyperparams( + conv_hyperparams_proto) + new_params = keras_config.params(activation=tf.nn.relu) + self.assertEqual(new_params['activation'], tf.nn.relu) + + def test_variance_in_range_with_variance_scaling_initializer_fan_in_keras( + self): conv_hyperparams_text_proto = """ regularizer { l2_regularizer { @@ -777,21 +789,19 @@ class HyperparamsBuilderTest(tf.test.TestCase): variance_scaling_initializer { factor: 2.0 mode: FAN_IN - uniform: true + uniform: false } } """ conv_hyperparams_proto = hyperparams_pb2.Hyperparams() text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) - scope_fn = hyperparams_builder.build(conv_hyperparams_proto, - is_training=True) - scope = scope_fn() - conv_scope_arguments = scope[_get_scope_key(slim.conv2d)] - initializer = conv_scope_arguments['weights_initializer'] + keras_config = hyperparams_builder.KerasLayerHyperparams( + conv_hyperparams_proto) + initializer = keras_config.params()['kernel_initializer'] self._assert_variance_in_range(initializer, shape=[100, 40], variance=2. / 100.) 
- def test_variance_in_range_with_variance_scaling_initializer_uniform_keras( + def test_variance_in_range_with_variance_scaling_initializer_fan_out_keras( self): conv_hyperparams_text_proto = """ regularizer { @@ -801,8 +811,8 @@ class HyperparamsBuilderTest(tf.test.TestCase): initializer { variance_scaling_initializer { factor: 2.0 - mode: FAN_IN - uniform: true + mode: FAN_OUT + uniform: false } } """ @@ -812,41 +822,43 @@ class HyperparamsBuilderTest(tf.test.TestCase): conv_hyperparams_proto) initializer = keras_config.params()['kernel_initializer'] self._assert_variance_in_range(initializer, shape=[100, 40], - variance=2. / 100.) + variance=2. / 40.) - def test_variance_in_range_with_truncated_normal_initializer(self): + def test_variance_in_range_with_variance_scaling_initializer_fan_avg_keras( + self): conv_hyperparams_text_proto = """ regularizer { l2_regularizer { } } initializer { - truncated_normal_initializer { - mean: 0.0 - stddev: 0.8 + variance_scaling_initializer { + factor: 2.0 + mode: FAN_AVG + uniform: false } } """ conv_hyperparams_proto = hyperparams_pb2.Hyperparams() text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) - scope_fn = hyperparams_builder.build(conv_hyperparams_proto, - is_training=True) - scope = scope_fn() - conv_scope_arguments = scope[_get_scope_key(slim.conv2d)] - initializer = conv_scope_arguments['weights_initializer'] + keras_config = hyperparams_builder.KerasLayerHyperparams( + conv_hyperparams_proto) + initializer = keras_config.params()['kernel_initializer'] self._assert_variance_in_range(initializer, shape=[100, 40], - variance=0.49, tol=1e-1) + variance=4. / (100. + 40.)) - def test_variance_in_range_with_truncated_normal_initializer_keras(self): + def test_variance_in_range_with_variance_scaling_initializer_uniform_keras( + self): conv_hyperparams_text_proto = """ regularizer { l2_regularizer { } } initializer { - truncated_normal_initializer { - mean: 0.0 - stddev: 0.8 + variance_scaling_initializer { + factor: 2.0 + mode: FAN_IN + uniform: true } } """ @@ -856,16 +868,16 @@ class HyperparamsBuilderTest(tf.test.TestCase): conv_hyperparams_proto) initializer = keras_config.params()['kernel_initializer'] self._assert_variance_in_range(initializer, shape=[100, 40], - variance=0.49, tol=1e-1) + variance=2. / 100.) 
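# Note (illustrative aside, not part of the upstream change): the expected
# variances in the truncated_normal and random_normal initializer tests come
# from stddev = 0.8. A plain normal gives variance 0.8**2 = 0.64, while a
# normal truncated at +/- 2 stddev loses its tails and keeps only roughly 77%
# of that variance, i.e. about 0.49, hence the looser tol=1e-1 in both cases.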
- def test_variance_in_range_with_random_normal_initializer(self): + def test_variance_in_range_with_truncated_normal_initializer_keras(self): conv_hyperparams_text_proto = """ regularizer { l2_regularizer { } } initializer { - random_normal_initializer { + truncated_normal_initializer { mean: 0.0 stddev: 0.8 } @@ -873,13 +885,11 @@ class HyperparamsBuilderTest(tf.test.TestCase): """ conv_hyperparams_proto = hyperparams_pb2.Hyperparams() text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) - scope_fn = hyperparams_builder.build(conv_hyperparams_proto, - is_training=True) - scope = scope_fn() - conv_scope_arguments = scope[_get_scope_key(slim.conv2d)] - initializer = conv_scope_arguments['weights_initializer'] + keras_config = hyperparams_builder.KerasLayerHyperparams( + conv_hyperparams_proto) + initializer = keras_config.params()['kernel_initializer'] self._assert_variance_in_range(initializer, shape=[100, 40], - variance=0.64, tol=1e-1) + variance=0.49, tol=1e-1) def test_variance_in_range_with_random_normal_initializer_keras(self): conv_hyperparams_text_proto = """ @@ -902,6 +912,5 @@ class HyperparamsBuilderTest(tf.test.TestCase): self._assert_variance_in_range(initializer, shape=[100, 40], variance=0.64, tol=1e-1) - if __name__ == '__main__': tf.test.main() diff --git a/research/object_detection/builders/image_resizer_builder_test.py b/research/object_detection/builders/image_resizer_builder_test.py index 62ea5dc9b..dfc456eab 100644 --- a/research/object_detection/builders/image_resizer_builder_test.py +++ b/research/object_detection/builders/image_resizer_builder_test.py @@ -18,21 +18,23 @@ import tensorflow.compat.v1 as tf from google.protobuf import text_format from object_detection.builders import image_resizer_builder from object_detection.protos import image_resizer_pb2 +from object_detection.utils import test_case -class ImageResizerBuilderTest(tf.test.TestCase): +class ImageResizerBuilderTest(test_case.TestCase): def _shape_of_resized_random_image_given_text_proto(self, input_shape, text_proto): image_resizer_config = image_resizer_pb2.ImageResizer() text_format.Merge(text_proto, image_resizer_config) image_resizer_fn = image_resizer_builder.build(image_resizer_config) - images = tf.cast( - tf.random_uniform(input_shape, minval=0, maxval=255, dtype=tf.int32), - dtype=tf.float32) - resized_images, _ = image_resizer_fn(images) - with self.test_session() as sess: - return sess.run(resized_images).shape + def graph_fn(): + images = tf.cast( + tf.random_uniform(input_shape, minval=0, maxval=255, dtype=tf.int32), + dtype=tf.float32) + resized_images, _ = image_resizer_fn(images) + return resized_images + return self.execute_cpu(graph_fn, []).shape def test_build_keep_aspect_ratio_resizer_returns_expected_shape(self): image_resizer_text_proto = """ @@ -125,10 +127,10 @@ class ImageResizerBuilderTest(tf.test.TestCase): image_resizer_config = image_resizer_pb2.ImageResizer() text_format.Merge(text_proto, image_resizer_config) image_resizer_fn = image_resizer_builder.build(image_resizer_config) - image_placeholder = tf.placeholder(tf.uint8, [1, None, None, 3]) - resized_image, _ = image_resizer_fn(image_placeholder) - with self.test_session() as sess: - return sess.run(resized_image, feed_dict={image_placeholder: image}) + def graph_fn(image): + resized_image, _ = image_resizer_fn(image) + return resized_image + return self.execute_cpu(graph_fn, [image]) def test_fixed_shape_resizer_nearest_neighbor_method(self): image_resizer_text_proto = """ diff --git 
a/research/object_detection/builders/input_reader_builder.py b/research/object_detection/builders/input_reader_builder.py index 0ab9c05b7..c7755177e 100644 --- a/research/object_detection/builders/input_reader_builder.py +++ b/research/object_detection/builders/input_reader_builder.py @@ -29,19 +29,12 @@ from __future__ import division from __future__ import print_function import tensorflow.compat.v1 as tf +import tf_slim as slim from object_detection.data_decoders import tf_example_decoder from object_detection.data_decoders import tf_sequence_example_decoder from object_detection.protos import input_reader_pb2 -# pylint: disable=g-import-not-at-top -try: - import tf_slim as slim -except ImportError: - # TF 2.0 doesn't ship with contrib. - pass -# pylint: enable=g-import-not-at-top - parallel_reader = slim.parallel_reader @@ -82,14 +75,14 @@ def build(input_reader_config): if input_reader_config.HasField('label_map_path'): label_map_proto_file = input_reader_config.label_map_path input_type = input_reader_config.input_type - if input_type == input_reader_pb2.InputType.TF_EXAMPLE: + if input_type == input_reader_pb2.InputType.Value('TF_EXAMPLE'): decoder = tf_example_decoder.TfExampleDecoder( load_instance_masks=input_reader_config.load_instance_masks, instance_mask_type=input_reader_config.mask_type, label_map_proto_file=label_map_proto_file, load_context_features=input_reader_config.load_context_features) return decoder.decode(string_tensor) - elif input_type == input_reader_pb2.InputType.TF_SEQUENCE_EXAMPLE: + elif input_type == input_reader_pb2.InputType.Value('TF_SEQUENCE_EXAMPLE'): decoder = tf_sequence_example_decoder.TfSequenceExampleDecoder( label_map_proto_file=label_map_proto_file, load_context_features=input_reader_config.load_context_features) diff --git a/research/object_detection/builders/input_reader_builder_test.py b/research/object_detection/builders/input_reader_builder_tf1_test.py similarity index 98% rename from research/object_detection/builders/input_reader_builder_test.py rename to research/object_detection/builders/input_reader_builder_tf1_test.py index 14a8eb819..6049128b0 100644 --- a/research/object_detection/builders/input_reader_builder_test.py +++ b/research/object_detection/builders/input_reader_builder_tf1_test.py @@ -16,6 +16,7 @@ """Tests for input_reader_builder.""" import os +import unittest import numpy as np import tensorflow.compat.v1 as tf @@ -26,6 +27,7 @@ from object_detection.core import standard_fields as fields from object_detection.dataset_tools import seq_example_util from object_detection.protos import input_reader_pb2 from object_detection.utils import dataset_util +from object_detection.utils import tf_version def _get_labelmap_path(): @@ -35,6 +37,7 @@ def _get_labelmap_path(): 'pet_label_map.pbtxt') +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class InputReaderBuilderTest(tf.test.TestCase): def create_tf_record(self): diff --git a/research/object_detection/builders/matcher_builder.py b/research/object_detection/builders/matcher_builder.py index d334f4353..086f74b5c 100644 --- a/research/object_detection/builders/matcher_builder.py +++ b/research/object_detection/builders/matcher_builder.py @@ -16,8 +16,11 @@ """A function to build an object detection matcher from configuration.""" from object_detection.matchers import argmax_matcher -from object_detection.matchers import bipartite_matcher from object_detection.protos import matcher_pb2 +from object_detection.utils import tf_version + +if tf_version.is_tf1(): + from 
object_detection.matchers import bipartite_matcher # pylint: disable=g-import-not-at-top def build(matcher_config): @@ -48,6 +51,8 @@ def build(matcher_config): force_match_for_each_row=matcher.force_match_for_each_row, use_matmul_gather=matcher.use_matmul_gather) if matcher_config.WhichOneof('matcher_oneof') == 'bipartite_matcher': + if tf_version.is_tf2(): + raise ValueError('bipartite_matcher is not supported in TF 2.X') matcher = matcher_config.bipartite_matcher return bipartite_matcher.GreedyBipartiteMatcher(matcher.use_matmul_gather) raise ValueError('Empty matcher.') diff --git a/research/object_detection/builders/matcher_builder_test.py b/research/object_detection/builders/matcher_builder_test.py index 451e1f9cc..cfa55ff94 100644 --- a/research/object_detection/builders/matcher_builder_test.py +++ b/research/object_detection/builders/matcher_builder_test.py @@ -20,11 +20,15 @@ import tensorflow.compat.v1 as tf from google.protobuf import text_format from object_detection.builders import matcher_builder from object_detection.matchers import argmax_matcher -from object_detection.matchers import bipartite_matcher from object_detection.protos import matcher_pb2 +from object_detection.utils import test_case +from object_detection.utils import tf_version +if tf_version.is_tf1(): + from object_detection.matchers import bipartite_matcher # pylint: disable=g-import-not-at-top -class MatcherBuilderTest(tf.test.TestCase): + +class MatcherBuilderTest(test_case.TestCase): def test_build_arg_max_matcher_with_defaults(self): matcher_text_proto = """ @@ -34,7 +38,7 @@ class MatcherBuilderTest(tf.test.TestCase): matcher_proto = matcher_pb2.Matcher() text_format.Merge(matcher_text_proto, matcher_proto) matcher_object = matcher_builder.build(matcher_proto) - self.assertTrue(isinstance(matcher_object, argmax_matcher.ArgMaxMatcher)) + self.assertIsInstance(matcher_object, argmax_matcher.ArgMaxMatcher) self.assertAlmostEqual(matcher_object._matched_threshold, 0.5) self.assertAlmostEqual(matcher_object._unmatched_threshold, 0.5) self.assertTrue(matcher_object._negatives_lower_than_unmatched) @@ -49,7 +53,7 @@ class MatcherBuilderTest(tf.test.TestCase): matcher_proto = matcher_pb2.Matcher() text_format.Merge(matcher_text_proto, matcher_proto) matcher_object = matcher_builder.build(matcher_proto) - self.assertTrue(isinstance(matcher_object, argmax_matcher.ArgMaxMatcher)) + self.assertIsInstance(matcher_object, argmax_matcher.ArgMaxMatcher) self.assertEqual(matcher_object._matched_threshold, None) self.assertEqual(matcher_object._unmatched_threshold, None) self.assertTrue(matcher_object._negatives_lower_than_unmatched) @@ -68,7 +72,7 @@ class MatcherBuilderTest(tf.test.TestCase): matcher_proto = matcher_pb2.Matcher() text_format.Merge(matcher_text_proto, matcher_proto) matcher_object = matcher_builder.build(matcher_proto) - self.assertTrue(isinstance(matcher_object, argmax_matcher.ArgMaxMatcher)) + self.assertIsInstance(matcher_object, argmax_matcher.ArgMaxMatcher) self.assertAlmostEqual(matcher_object._matched_threshold, 0.7) self.assertAlmostEqual(matcher_object._unmatched_threshold, 0.3) self.assertFalse(matcher_object._negatives_lower_than_unmatched) @@ -76,6 +80,8 @@ class MatcherBuilderTest(tf.test.TestCase): self.assertTrue(matcher_object._use_matmul_gather) def test_build_bipartite_matcher(self): + if tf_version.is_tf2(): + self.skipTest('BipartiteMatcher unsupported in TF 2.X. 
Skipping.') matcher_text_proto = """ bipartite_matcher { } @@ -83,8 +89,8 @@ class MatcherBuilderTest(tf.test.TestCase): matcher_proto = matcher_pb2.Matcher() text_format.Merge(matcher_text_proto, matcher_proto) matcher_object = matcher_builder.build(matcher_proto) - self.assertTrue( - isinstance(matcher_object, bipartite_matcher.GreedyBipartiteMatcher)) + self.assertIsInstance(matcher_object, + bipartite_matcher.GreedyBipartiteMatcher) def test_raise_error_on_empty_matcher(self): matcher_text_proto = """ diff --git a/research/object_detection/builders/model_builder.py b/research/object_detection/builders/model_builder.py index d5afb825f..cdb17e88b 100644 --- a/research/object_detection/builders/model_builder.py +++ b/research/object_detection/builders/model_builder.py @@ -28,6 +28,8 @@ from object_detection.builders import region_similarity_calculator_builder as si from object_detection.core import balanced_positive_negative_sampler as sampler from object_detection.core import post_processing from object_detection.core import target_assigner +from object_detection.meta_architectures import center_net_meta_arch +from object_detection.meta_architectures import context_rcnn_meta_arch from object_detection.meta_architectures import faster_rcnn_meta_arch from object_detection.meta_architectures import rfcn_meta_arch from object_detection.meta_architectures import ssd_meta_arch @@ -46,6 +48,7 @@ from object_detection.utils import tf_version if tf_version.is_tf2(): from object_detection.models import center_net_hourglass_feature_extractor from object_detection.models import center_net_resnet_feature_extractor + from object_detection.models import center_net_resnet_v1_fpn_feature_extractor from object_detection.models import faster_rcnn_inception_resnet_v2_keras_feature_extractor as frcnn_inc_res_keras from object_detection.models import faster_rcnn_resnet_keras_feature_extractor as frcnn_resnet_keras from object_detection.models import ssd_resnet_v1_fpn_keras_feature_extractor as ssd_resnet_v1_fpn_keras @@ -78,6 +81,7 @@ if tf_version.is_tf1(): from object_detection.models.ssd_mobiledet_feature_extractor import SSDMobileDetCPUFeatureExtractor from object_detection.models.ssd_mobiledet_feature_extractor import SSDMobileDetDSPFeatureExtractor from object_detection.models.ssd_mobiledet_feature_extractor import SSDMobileDetEdgeTPUFeatureExtractor + from object_detection.models.ssd_mobiledet_feature_extractor import SSDMobileDetGPUFeatureExtractor from object_detection.models.ssd_pnasnet_feature_extractor import SSDPNASNetFeatureExtractor from object_detection.predictors import rfcn_box_predictor # pylint: enable=g-import-not-at-top @@ -108,8 +112,12 @@ if tf_version.is_tf2(): } CENTER_NET_EXTRACTOR_FUNCTION_MAP = { - 'resnet_v2_101': center_net_resnet_feature_extractor.resnet_v2_101, 'resnet_v2_50': center_net_resnet_feature_extractor.resnet_v2_50, + 'resnet_v2_101': center_net_resnet_feature_extractor.resnet_v2_101, + 'resnet_v1_50_fpn': + center_net_resnet_v1_fpn_feature_extractor.resnet_v1_50_fpn, + 'resnet_v1_101_fpn': + center_net_resnet_v1_fpn_feature_extractor.resnet_v1_101_fpn, 'hourglass_104': center_net_hourglass_feature_extractor.hourglass_104, } @@ -159,9 +167,14 @@ if tf_version.is_tf1(): EmbeddedSSDMobileNetV1FeatureExtractor, 'ssd_pnasnet': SSDPNASNetFeatureExtractor, - 'ssd_mobiledet_cpu': SSDMobileDetCPUFeatureExtractor, - 'ssd_mobiledet_dsp': SSDMobileDetDSPFeatureExtractor, - 'ssd_mobiledet_edgetpu': SSDMobileDetEdgeTPUFeatureExtractor, + 'ssd_mobiledet_cpu': + 
SSDMobileDetCPUFeatureExtractor, + 'ssd_mobiledet_dsp': + SSDMobileDetDSPFeatureExtractor, + 'ssd_mobiledet_edgetpu': + SSDMobileDetEdgeTPUFeatureExtractor, + 'ssd_mobiledet_gpu': + SSDMobileDetGPUFeatureExtractor, } FASTER_RCNN_FEATURE_EXTRACTOR_CLASS_MAP = { @@ -765,7 +778,9 @@ def keypoint_proto_to_params(kp_config, keypoint_map_dict): unmatched_keypoint_score=kp_config.unmatched_keypoint_score, box_scale=kp_config.box_scale, candidate_search_scale=kp_config.candidate_search_scale, - candidate_ranking_mode=kp_config.candidate_ranking_mode) + candidate_ranking_mode=kp_config.candidate_ranking_mode, + offset_peak_radius=kp_config.offset_peak_radius, + per_keypoint_offset=kp_config.per_keypoint_offset) def object_detection_proto_to_params(od_config): diff --git a/research/object_detection/builders/model_builder_tf1_test.py b/research/object_detection/builders/model_builder_tf1_test.py index a4d2913f5..083275ac4 100644 --- a/research/object_detection/builders/model_builder_tf1_test.py +++ b/research/object_detection/builders/model_builder_tf1_test.py @@ -14,16 +14,19 @@ # limitations under the License. # ============================================================================== """Tests for model_builder under TensorFlow 1.X.""" - +import unittest from absl.testing import parameterized import tensorflow.compat.v1 as tf from object_detection.builders import model_builder from object_detection.builders import model_builder_test +from object_detection.meta_architectures import context_rcnn_meta_arch from object_detection.meta_architectures import ssd_meta_arch from object_detection.protos import losses_pb2 +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class ModelBuilderTF1Test(model_builder_test.ModelBuilderTest): def default_ssd_feature_extractor(self): @@ -39,6 +42,14 @@ class ModelBuilderTF1Test(model_builder_test.ModelBuilderTest): return model_builder.FASTER_RCNN_FEATURE_EXTRACTOR_CLASS_MAP + @parameterized.parameters(True, False) + def test_create_context_rcnn_from_config_with_params(self, is_training): + model_proto = self.create_default_faster_rcnn_model_proto() + model_proto.faster_rcnn.context_config.attention_bottleneck_dimension = 10 + model_proto.faster_rcnn.context_config.attention_temperature = 0.5 + model = model_builder.build(model_proto, is_training=is_training) + self.assertIsInstance(model, context_rcnn_meta_arch.ContextRCNNMetaArch) + if __name__ == '__main__': tf.test.main() diff --git a/research/object_detection/builders/model_builder_tf2_test.py b/research/object_detection/builders/model_builder_tf2_test.py new file mode 100644 index 000000000..c2cd23729 --- /dev/null +++ b/research/object_detection/builders/model_builder_tf2_test.py @@ -0,0 +1,261 @@ +# Lint as: python2, python3 +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Tests for model_builder under TensorFlow 2.X.""" + +import os +import unittest + +import tensorflow.compat.v1 as tf + +from google.protobuf import text_format +from object_detection.builders import model_builder +from object_detection.builders import model_builder_test +from object_detection.core import losses +from object_detection.models import center_net_resnet_feature_extractor +from object_detection.protos import center_net_pb2 +from object_detection.protos import model_pb2 +from object_detection.utils import tf_version + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class ModelBuilderTF2Test(model_builder_test.ModelBuilderTest): + + def default_ssd_feature_extractor(self): + return 'ssd_resnet50_v1_fpn_keras' + + def default_faster_rcnn_feature_extractor(self): + return 'faster_rcnn_resnet101_keras' + + def ssd_feature_extractors(self): + return model_builder.SSD_KERAS_FEATURE_EXTRACTOR_CLASS_MAP + + def faster_rcnn_feature_extractors(self): + return model_builder.FASTER_RCNN_KERAS_FEATURE_EXTRACTOR_CLASS_MAP + + def get_fake_label_map_file_path(self): + keypoint_spec_text = """ + item { + name: "/m/01g317" + id: 1 + display_name: "person" + keypoints { + id: 0 + label: 'nose' + } + keypoints { + id: 1 + label: 'left_shoulder' + } + keypoints { + id: 2 + label: 'right_shoulder' + } + keypoints { + id: 3 + label: 'hip' + } + } + """ + keypoint_label_map_path = os.path.join( + self.get_temp_dir(), 'keypoint_label_map') + with tf.gfile.Open(keypoint_label_map_path, 'wb') as f: + f.write(keypoint_spec_text) + return keypoint_label_map_path + + def get_fake_keypoint_proto(self): + task_proto_txt = """ + task_name: "human_pose" + task_loss_weight: 0.9 + keypoint_regression_loss_weight: 1.0 + keypoint_heatmap_loss_weight: 0.1 + keypoint_offset_loss_weight: 0.5 + heatmap_bias_init: 2.14 + keypoint_class_name: "/m/01g317" + loss { + classification_loss { + penalty_reduced_logistic_focal_loss { + alpha: 3.0 + beta: 4.0 + } + } + localization_loss { + l1_localization_loss { + } + } + } + keypoint_label_to_std { + key: "nose" + value: 0.3 + } + keypoint_label_to_std { + key: "hip" + value: 0.0 + } + keypoint_candidate_score_threshold: 0.3 + num_candidates_per_keypoint: 12 + peak_max_pool_kernel_size: 5 + unmatched_keypoint_score: 0.05 + box_scale: 1.7 + candidate_search_scale: 0.2 + candidate_ranking_mode: "score_distance_ratio" + offset_peak_radius: 3 + per_keypoint_offset: true + """ + config = text_format.Merge(task_proto_txt, + center_net_pb2.CenterNet.KeypointEstimation()) + return config + + def get_fake_object_center_proto(self): + proto_txt = """ + object_center_loss_weight: 0.5 + heatmap_bias_init: 3.14 + min_box_overlap_iou: 0.2 + max_box_predictions: 15 + classification_loss { + penalty_reduced_logistic_focal_loss { + alpha: 3.0 + beta: 4.0 + } + } + """ + return text_format.Merge(proto_txt, + center_net_pb2.CenterNet.ObjectCenterParams()) + + def get_fake_object_detection_proto(self): + proto_txt = """ + task_loss_weight: 0.5 + offset_loss_weight: 0.1 + scale_loss_weight: 0.2 + localization_loss { + l1_localization_loss { + } + } + """ + return text_format.Merge(proto_txt, + center_net_pb2.CenterNet.ObjectDetection()) + + def get_fake_mask_proto(self): + proto_txt = """ + task_loss_weight: 0.7 + classification_loss { + weighted_softmax {} + } + mask_height: 8 + mask_width: 8 + score_threshold: 0.7 + heatmap_bias_init: -2.0 + """ + return text_format.Merge(proto_txt, + 
center_net_pb2.CenterNet.MaskEstimation()) + + def test_create_center_net_model(self): + """Test building a CenterNet model from proto txt.""" + proto_txt = """ + center_net { + num_classes: 10 + feature_extractor { + type: "resnet_v2_101" + channel_stds: [4, 5, 6] + bgr_ordering: true + } + image_resizer { + keep_aspect_ratio_resizer { + min_dimension: 512 + max_dimension: 512 + pad_to_max_dimension: true + } + } + } + """ + # Set up the configuration proto. + config = text_format.Merge(proto_txt, model_pb2.DetectionModel()) + config.center_net.object_center_params.CopyFrom( + self.get_fake_object_center_proto()) + config.center_net.object_detection_task.CopyFrom( + self.get_fake_object_detection_proto()) + config.center_net.keypoint_estimation_task.append( + self.get_fake_keypoint_proto()) + config.center_net.keypoint_label_map_path = ( + self.get_fake_label_map_file_path()) + config.center_net.mask_estimation_task.CopyFrom( + self.get_fake_mask_proto()) + + # Build the model from the configuration. + model = model_builder.build(config, is_training=True) + + # Check object center related parameters. + self.assertEqual(model._num_classes, 10) + self.assertIsInstance(model._center_params.classification_loss, + losses.PenaltyReducedLogisticFocalLoss) + self.assertEqual(model._center_params.classification_loss._alpha, 3.0) + self.assertEqual(model._center_params.classification_loss._beta, 4.0) + self.assertAlmostEqual(model._center_params.min_box_overlap_iou, 0.2) + self.assertAlmostEqual( + model._center_params.heatmap_bias_init, 3.14, places=4) + self.assertEqual(model._center_params.max_box_predictions, 15) + + # Check object detection related parameters. + self.assertAlmostEqual(model._od_params.offset_loss_weight, 0.1) + self.assertAlmostEqual(model._od_params.scale_loss_weight, 0.2) + self.assertAlmostEqual(model._od_params.task_loss_weight, 0.5) + self.assertIsInstance(model._od_params.localization_loss, + losses.L1LocalizationLoss) + + # Check keypoint estimation related parameters. + kp_params = model._kp_params_dict['human_pose'] + self.assertAlmostEqual(kp_params.task_loss_weight, 0.9) + self.assertAlmostEqual(kp_params.keypoint_regression_loss_weight, 1.0) + self.assertAlmostEqual(kp_params.keypoint_offset_loss_weight, 0.5) + self.assertAlmostEqual(kp_params.heatmap_bias_init, 2.14, places=4) + self.assertEqual(kp_params.classification_loss._alpha, 3.0) + self.assertEqual(kp_params.keypoint_indices, [0, 1, 2, 3]) + self.assertEqual(kp_params.keypoint_labels, + ['nose', 'left_shoulder', 'right_shoulder', 'hip']) + self.assertAllClose(kp_params.keypoint_std_dev, [0.3, 1.0, 1.0, 0.0]) + self.assertEqual(kp_params.classification_loss._beta, 4.0) + self.assertIsInstance(kp_params.localization_loss, + losses.L1LocalizationLoss) + self.assertAlmostEqual(kp_params.keypoint_candidate_score_threshold, 0.3) + self.assertEqual(kp_params.num_candidates_per_keypoint, 12) + self.assertEqual(kp_params.peak_max_pool_kernel_size, 5) + self.assertAlmostEqual(kp_params.unmatched_keypoint_score, 0.05) + self.assertAlmostEqual(kp_params.box_scale, 1.7) + self.assertAlmostEqual(kp_params.candidate_search_scale, 0.2) + self.assertEqual(kp_params.candidate_ranking_mode, 'score_distance_ratio') + self.assertEqual(kp_params.offset_peak_radius, 3) + self.assertEqual(kp_params.per_keypoint_offset, True) + + # Check mask related parameters. 
+ self.assertAlmostEqual(model._mask_params.task_loss_weight, 0.7) + self.assertIsInstance(model._mask_params.classification_loss, + losses.WeightedSoftmaxClassificationLoss) + self.assertEqual(model._mask_params.mask_height, 8) + self.assertEqual(model._mask_params.mask_width, 8) + self.assertAlmostEqual(model._mask_params.score_threshold, 0.7) + self.assertAlmostEqual( + model._mask_params.heatmap_bias_init, -2.0, places=4) + + # Check feature extractor parameters. + self.assertIsInstance( + model._feature_extractor, + center_net_resnet_feature_extractor.CenterNetResnetFeatureExtractor) + self.assertAllClose(model._feature_extractor._channel_means, [0, 0, 0]) + self.assertAllClose(model._feature_extractor._channel_stds, [4, 5, 6]) + self.assertTrue(model._feature_extractor._bgr_ordering) + + +if __name__ == '__main__': + tf.test.main() diff --git a/research/object_detection/builders/optimizer_builder.py b/research/object_detection/builders/optimizer_builder.py index 548b5cdcf..d602bad12 100644 --- a/research/object_detection/builders/optimizer_builder.py +++ b/research/object_detection/builders/optimizer_builder.py @@ -17,10 +17,13 @@ import tensorflow.compat.v1 as tf - -from tensorflow.contrib import opt as tf_opt from object_detection.utils import learning_schedules +try: + from tensorflow.contrib import opt as tf_opt # pylint: disable=g-import-not-at-top +except: # pylint: disable=bare-except + pass + def build_optimizers_tf_v1(optimizer_config, global_step=None): """Create a TF v1 compatible optimizer based on config. diff --git a/research/object_detection/builders/optimizer_builder_tf1_test.py b/research/object_detection/builders/optimizer_builder_tf1_test.py index 9a6d1e404..350ecb84b 100644 --- a/research/object_detection/builders/optimizer_builder_tf1_test.py +++ b/research/object_detection/builders/optimizer_builder_tf1_test.py @@ -20,6 +20,7 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function +import unittest import six import tensorflow.compat.v1 as tf @@ -27,16 +28,15 @@ from google.protobuf import text_format from object_detection.builders import optimizer_builder from object_detection.protos import optimizer_pb2 +from object_detection.utils import tf_version # pylint: disable=g-import-not-at-top -try: +if tf_version.is_tf1(): from tensorflow.contrib import opt as contrib_opt -except ImportError: - # TF 2.0 doesn't ship with contrib. - pass # pylint: enable=g-import-not-at-top +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class LearningRateBuilderTest(tf.test.TestCase): def testBuildConstantLearningRate(self): @@ -118,6 +118,7 @@ class LearningRateBuilderTest(tf.test.TestCase): optimizer_builder._create_learning_rate(learning_rate_proto) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class OptimizerBuilderTest(tf.test.TestCase): def testBuildRMSPropOptimizer(self): diff --git a/research/object_detection/builders/optimizer_builder_tf2_test.py b/research/object_detection/builders/optimizer_builder_tf2_test.py new file mode 100644 index 000000000..2c555f9a0 --- /dev/null +++ b/research/object_detection/builders/optimizer_builder_tf2_test.py @@ -0,0 +1,104 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +"""Tests for optimizer_builder.""" +import unittest +import tensorflow.compat.v1 as tf + +from google.protobuf import text_format + +from object_detection.builders import optimizer_builder +from object_detection.protos import optimizer_pb2 +from object_detection.utils import tf_version + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class OptimizerBuilderV2Test(tf.test.TestCase): + """Test building optimizers in V2 mode.""" + + def testBuildRMSPropOptimizer(self): + optimizer_text_proto = """ + rms_prop_optimizer: { + learning_rate: { + exponential_decay_learning_rate { + initial_learning_rate: 0.004 + decay_steps: 800720 + decay_factor: 0.95 + } + } + momentum_optimizer_value: 0.9 + decay: 0.9 + epsilon: 1.0 + } + use_moving_average: false + """ + optimizer_proto = optimizer_pb2.Optimizer() + text_format.Merge(optimizer_text_proto, optimizer_proto) + optimizer, _ = optimizer_builder.build(optimizer_proto) + self.assertIsInstance(optimizer, tf.keras.optimizers.RMSprop) + + def testBuildMomentumOptimizer(self): + optimizer_text_proto = """ + momentum_optimizer: { + learning_rate: { + constant_learning_rate { + learning_rate: 0.001 + } + } + momentum_optimizer_value: 0.99 + } + use_moving_average: false + """ + optimizer_proto = optimizer_pb2.Optimizer() + text_format.Merge(optimizer_text_proto, optimizer_proto) + optimizer, _ = optimizer_builder.build(optimizer_proto) + self.assertIsInstance(optimizer, tf.keras.optimizers.SGD) + + def testBuildAdamOptimizer(self): + optimizer_text_proto = """ + adam_optimizer: { + learning_rate: { + constant_learning_rate { + learning_rate: 0.002 + } + } + } + use_moving_average: false + """ + optimizer_proto = optimizer_pb2.Optimizer() + text_format.Merge(optimizer_text_proto, optimizer_proto) + optimizer, _ = optimizer_builder.build(optimizer_proto) + self.assertIsInstance(optimizer, tf.keras.optimizers.Adam) + + def testMovingAverageOptimizerUnsupported(self): + optimizer_text_proto = """ + adam_optimizer: { + learning_rate: { + constant_learning_rate { + learning_rate: 0.002 + } + } + } + use_moving_average: True + """ + optimizer_proto = optimizer_pb2.Optimizer() + text_format.Merge(optimizer_text_proto, optimizer_proto) + with self.assertRaises(ValueError): + optimizer_builder.build(optimizer_proto) + + +if __name__ == '__main__': + tf.enable_v2_behavior() + tf.test.main() diff --git a/research/object_detection/builders/post_processing_builder_test.py b/research/object_detection/builders/post_processing_builder_test.py index d163aa8f2..b7383c92f 100644 --- a/research/object_detection/builders/post_processing_builder_test.py +++ b/research/object_detection/builders/post_processing_builder_test.py @@ -19,9 +19,10 @@ import tensorflow.compat.v1 as tf from google.protobuf import text_format from object_detection.builders import post_processing_builder from object_detection.protos import post_processing_pb2 +from object_detection.utils import test_case -class PostProcessingBuilderTest(tf.test.TestCase): +class 
PostProcessingBuilderTest(test_case.TestCase): def test_build_non_max_suppressor_with_correct_parameters(self): post_processing_text_proto = """ @@ -77,13 +78,12 @@ class PostProcessingBuilderTest(tf.test.TestCase): _, score_converter = post_processing_builder.build( post_processing_config) self.assertEqual(score_converter.__name__, 'identity_with_logit_scale') - - inputs = tf.constant([1, 1], tf.float32) - outputs = score_converter(inputs) - with self.test_session() as sess: - converted_scores = sess.run(outputs) - expected_converted_scores = sess.run(inputs) - self.assertAllClose(converted_scores, expected_converted_scores) + def graph_fn(): + inputs = tf.constant([1, 1], tf.float32) + outputs = score_converter(inputs) + return outputs + converted_scores = self.execute_cpu(graph_fn, []) + self.assertAllClose(converted_scores, [1, 1]) def test_build_identity_score_converter_with_logit_scale(self): post_processing_text_proto = """ @@ -95,12 +95,12 @@ class PostProcessingBuilderTest(tf.test.TestCase): _, score_converter = post_processing_builder.build(post_processing_config) self.assertEqual(score_converter.__name__, 'identity_with_logit_scale') - inputs = tf.constant([1, 1], tf.float32) - outputs = score_converter(inputs) - with self.test_session() as sess: - converted_scores = sess.run(outputs) - expected_converted_scores = sess.run(tf.constant([.5, .5], tf.float32)) - self.assertAllClose(converted_scores, expected_converted_scores) + def graph_fn(): + inputs = tf.constant([1, 1], tf.float32) + outputs = score_converter(inputs) + return outputs + converted_scores = self.execute_cpu(graph_fn, []) + self.assertAllClose(converted_scores, [.5, .5]) def test_build_sigmoid_score_converter(self): post_processing_text_proto = """ @@ -153,12 +153,12 @@ class PostProcessingBuilderTest(tf.test.TestCase): self.assertEqual(calibrated_score_conversion_fn.__name__, 'calibrate_with_function_approximation') - input_scores = tf.constant([1, 1], tf.float32) - outputs = calibrated_score_conversion_fn(input_scores) - with self.test_session() as sess: - calibrated_scores = sess.run(outputs) - expected_calibrated_scores = sess.run(tf.constant([0.5, 0.5], tf.float32)) - self.assertAllClose(calibrated_scores, expected_calibrated_scores) + def graph_fn(): + input_scores = tf.constant([1, 1], tf.float32) + outputs = calibrated_score_conversion_fn(input_scores) + return outputs + calibrated_scores = self.execute_cpu(graph_fn, []) + self.assertAllClose(calibrated_scores, [0.5, 0.5]) def test_build_temperature_scaling_calibrator(self): post_processing_text_proto = """ @@ -174,12 +174,12 @@ class PostProcessingBuilderTest(tf.test.TestCase): self.assertEqual(calibrated_score_conversion_fn.__name__, 'calibrate_with_temperature_scaling_calibration') - input_scores = tf.constant([1, 1], tf.float32) - outputs = calibrated_score_conversion_fn(input_scores) - with self.test_session() as sess: - calibrated_scores = sess.run(outputs) - expected_calibrated_scores = sess.run(tf.constant([0.5, 0.5], tf.float32)) - self.assertAllClose(calibrated_scores, expected_calibrated_scores) + def graph_fn(): + input_scores = tf.constant([1, 1], tf.float32) + outputs = calibrated_score_conversion_fn(input_scores) + return outputs + calibrated_scores = self.execute_cpu(graph_fn, []) + self.assertAllClose(calibrated_scores, [0.5, 0.5]) if __name__ == '__main__': tf.test.main() diff --git a/research/object_detection/colab_tutorials/context_rcnn_tutorial.ipynb b/research/object_detection/colab_tutorials/context_rcnn_tutorial.ipynb new file mode 
100644 index 000000000..b735cfbce --- /dev/null +++ b/research/object_detection/colab_tutorials/context_rcnn_tutorial.ipynb @@ -0,0 +1,1500 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "accelerator": "GPU", + "colab": { + "name": "context_rcnn_tutorial.ipynb", + "provenance": [], + "collapsed_sections": [] + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + } + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "jZc1kMel3sZP", + "colab_type": "text" + }, + "source": [ + "# Context R-CNN Demo\n", + "\n", + "
\n", + " \n", + " Run in Google Colab\n", + " \n", + "\n", + " \n", + " View source on GitHub\n", + "
\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "XuHWvdag3_b9", + "colab_type": "text" + }, + "source": [ + " This notebook will walk you step by step through the process of using a pre-trained model to build up a contextual memory bank for a set of images, and then detect objects in those images+context using [Context R-CNN](https://arxiv.org/abs/1912.03538)." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "u0e-OOtn4hQ8", + "colab_type": "text" + }, + "source": [ + "# Setup" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "w-UrhxBw4iLA", + "colab_type": "text" + }, + "source": [ + "Important: If you're running on a local machine, be sure to follow the [installation instructions](https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/installation.md). This notebook includes only what's necessary to run in Colab." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "SAqMxS4V4lqS", + "colab_type": "text" + }, + "source": [ + "### Install" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "BPkovrxF4o8n", + "colab_type": "code", + "outputId": "e1b8debc-ab73-4b3e-9e44-c86446c7cda1", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 785 + } + }, + "source": [ + "!pip install -U --pre tensorflow==\"2.*\"\n", + "!pip install tf_slim" + ], + "execution_count": 1, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Requirement already up-to-date: tensorflow==2.* in /usr/local/lib/python3.6/dist-packages (2.2.0)\n", + "Requirement already satisfied, skipping upgrade: scipy==1.4.1; python_version >= \"3\" in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (1.4.1)\n", + "Requirement already satisfied, skipping upgrade: protobuf>=3.8.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (3.10.0)\n", + "Requirement already satisfied, skipping upgrade: h5py<2.11.0,>=2.10.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (2.10.0)\n", + "Requirement already satisfied, skipping upgrade: opt-einsum>=2.3.2 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (3.2.1)\n", + "Requirement already satisfied, skipping upgrade: numpy<2.0,>=1.16.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (1.18.5)\n", + "Requirement already satisfied, skipping upgrade: wheel>=0.26; python_version >= \"3\" in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (0.34.2)\n", + "Requirement already satisfied, skipping upgrade: absl-py>=0.7.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (0.9.0)\n", + "Requirement already satisfied, skipping upgrade: tensorflow-estimator<2.3.0,>=2.2.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (2.2.0)\n", + "Requirement already satisfied, skipping upgrade: google-pasta>=0.1.8 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (0.2.0)\n", + "Requirement already satisfied, skipping upgrade: grpcio>=1.8.6 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (1.29.0)\n", + "Requirement already satisfied, skipping upgrade: tensorboard<2.3.0,>=2.2.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (2.2.2)\n", + "Requirement already satisfied, skipping upgrade: gast==0.3.3 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (0.3.3)\n", + "Requirement already satisfied, skipping upgrade: astunparse==1.6.3 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (1.6.3)\n", + "Requirement already 
satisfied, skipping upgrade: keras-preprocessing>=1.1.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (1.1.2)\n", + "Requirement already satisfied, skipping upgrade: termcolor>=1.1.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (1.1.0)\n", + "Requirement already satisfied, skipping upgrade: six>=1.12.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (1.12.0)\n", + "Requirement already satisfied, skipping upgrade: wrapt>=1.11.1 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (1.12.1)\n", + "Requirement already satisfied, skipping upgrade: setuptools in /usr/local/lib/python3.6/dist-packages (from protobuf>=3.8.0->tensorflow==2.*) (47.1.1)\n", + "Requirement already satisfied, skipping upgrade: google-auth<2,>=1.6.3 in /usr/local/lib/python3.6/dist-packages (from tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (1.7.2)\n", + "Requirement already satisfied, skipping upgrade: werkzeug>=0.11.15 in /usr/local/lib/python3.6/dist-packages (from tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (1.0.1)\n", + "Requirement already satisfied, skipping upgrade: requests<3,>=2.21.0 in /usr/local/lib/python3.6/dist-packages (from tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (2.23.0)\n", + "Requirement already satisfied, skipping upgrade: google-auth-oauthlib<0.5,>=0.4.1 in /usr/local/lib/python3.6/dist-packages (from tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (0.4.1)\n", + "Requirement already satisfied, skipping upgrade: markdown>=2.6.8 in /usr/local/lib/python3.6/dist-packages (from tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (3.2.2)\n", + "Requirement already satisfied, skipping upgrade: tensorboard-plugin-wit>=1.6.0 in /usr/local/lib/python3.6/dist-packages (from tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (1.6.0.post3)\n", + "Requirement already satisfied, skipping upgrade: cachetools<3.2,>=2.0.0 in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (3.1.1)\n", + "Requirement already satisfied, skipping upgrade: rsa<4.1,>=3.1.4 in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (4.0)\n", + "Requirement already satisfied, skipping upgrade: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (0.2.8)\n", + "Requirement already satisfied, skipping upgrade: idna<3,>=2.5 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (2.9)\n", + "Requirement already satisfied, skipping upgrade: chardet<4,>=3.0.2 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (3.0.4)\n", + "Requirement already satisfied, skipping upgrade: certifi>=2017.4.17 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (2020.4.5.1)\n", + "Requirement already satisfied, skipping upgrade: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (1.24.3)\n", + "Requirement already satisfied, skipping upgrade: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.6/dist-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (1.3.0)\n", + "Requirement already satisfied, skipping upgrade: importlib-metadata; python_version < \"3.8\" in 
/usr/local/lib/python3.6/dist-packages (from markdown>=2.6.8->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (1.6.0)\n", + "Requirement already satisfied, skipping upgrade: pyasn1>=0.1.3 in /usr/local/lib/python3.6/dist-packages (from rsa<4.1,>=3.1.4->google-auth<2,>=1.6.3->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (0.4.8)\n", + "Requirement already satisfied, skipping upgrade: oauthlib>=3.0.0 in /usr/local/lib/python3.6/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (3.1.0)\n", + "Requirement already satisfied, skipping upgrade: zipp>=0.5 in /usr/local/lib/python3.6/dist-packages (from importlib-metadata; python_version < \"3.8\"->markdown>=2.6.8->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (3.1.0)\n", + "Collecting tf_slim\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/02/97/b0f4a64df018ca018cc035d44f2ef08f91e2e8aa67271f6f19633a015ff7/tf_slim-1.1.0-py2.py3-none-any.whl (352kB)\n", + "\u001b[K |████████████████████████████████| 358kB 2.8MB/s \n", + "\u001b[?25hRequirement already satisfied: absl-py>=0.2.2 in /usr/local/lib/python3.6/dist-packages (from tf_slim) (0.9.0)\n", + "Requirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from absl-py>=0.2.2->tf_slim) (1.12.0)\n", + "Installing collected packages: tf-slim\n", + "Successfully installed tf-slim-1.1.0\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "zpKF8a2x4tec", + "colab_type": "text" + }, + "source": [ + "Make sure you have `pycocotools` installed" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "owcrp0AW4uCg", + "colab_type": "code", + "outputId": "001148a8-b0a8-43a1-f6df-225d86d90b8f", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + } + }, + "source": [ + "!pip install pycocotools" + ], + "execution_count": 2, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Requirement already satisfied: pycocotools in /usr/local/lib/python3.6/dist-packages (2.0.0)\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "wHFSRVaO4wuq", + "colab_type": "text" + }, + "source": [ + "Get `tensorflow/models` or `cd` to parent directory of the repository." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "E0ZuGKoi4wTn", + "colab_type": "code", + "outputId": "2b5d93cb-3548-4347-9b76-ce12bea44a56", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 136 + } + }, + "source": [ + "import os\n", + "import pathlib\n", + "\n", + "\n", + "if \"models\" in pathlib.Path.cwd().parts:\n", + " while \"models\" in pathlib.Path.cwd().parts:\n", + " os.chdir('..')\n", + "elif not pathlib.Path('models').exists():\n", + " !git clone --depth 1 https://github.com/tensorflow/models" + ], + "execution_count": 3, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Cloning into 'models'...\n", + "remote: Enumerating objects: 2694, done.\u001b[K\n", + "remote: Counting objects: 100% (2694/2694), done.\u001b[K\n", + "remote: Compressing objects: 100% (2370/2370), done.\u001b[K\n", + "remote: Total 2694 (delta 520), reused 1332 (delta 290), pack-reused 0\u001b[K\n", + "Receiving objects: 100% (2694/2694), 34.10 MiB | 29.32 MiB/s, done.\n", + "Resolving deltas: 100% (520/520), done.\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "GkqRm-WY47MR", + "colab_type": "text" + }, + "source": [ + "Compile protobufs and install the object_detection package" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "62Dn1_YU45O2", + "colab_type": "code", + "outputId": "439166dd-6202-4ff9-897d-100a35ae5af5", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 54 + } + }, + "source": [ + "%%bash\n", + "cd models/research/\n", + "protoc object_detection/protos/*.proto --python_out=." + ], + "execution_count": 4, + "outputs": [ + { + "output_type": "stream", + "text": [ + "object_detection/protos/input_reader.proto: warning: Import object_detection/protos/image_resizer.proto but not used.\n" + ], + "name": "stderr" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "83kNiD-24-ZB", + "colab_type": "code", + "outputId": "aa148939-7dcc-4fbd-ea48-41236523712c", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 343 + } + }, + "source": [ + "%%bash \n", + "cd models/research\n", + "pip install ." 
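The protoc step above generates the `*_pb2` Python modules that both this tutorial and the builder tests earlier in this patch rely on; a minimal sketch of exercising the generated code (assuming the object_detection package is importable after the install below):

```
from google.protobuf import text_format

from object_detection.protos import model_pb2

# Parse a tiny text-format config into the generated DetectionModel message,
# the same pattern the builder tests above use with larger configs.
config = text_format.Merge('center_net { num_classes: 10 }',
                           model_pb2.DetectionModel())
print(config.center_net.num_classes)  # 10
```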
+ ], + "execution_count": 5, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Processing /content/models/research\n", + "Requirement already satisfied: Pillow>=1.0 in /usr/local/lib/python3.6/dist-packages (from object-detection==0.1) (7.0.0)\n", + "Requirement already satisfied: Matplotlib>=2.1 in /usr/local/lib/python3.6/dist-packages (from object-detection==0.1) (3.2.1)\n", + "Requirement already satisfied: Cython>=0.28.1 in /usr/local/lib/python3.6/dist-packages (from object-detection==0.1) (0.29.19)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.6/dist-packages (from Matplotlib>=2.1->object-detection==0.1) (0.10.0)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.6/dist-packages (from Matplotlib>=2.1->object-detection==0.1) (2.4.7)\n", + "Requirement already satisfied: numpy>=1.11 in /usr/local/lib/python3.6/dist-packages (from Matplotlib>=2.1->object-detection==0.1) (1.18.5)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.6/dist-packages (from Matplotlib>=2.1->object-detection==0.1) (2.8.1)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.6/dist-packages (from Matplotlib>=2.1->object-detection==0.1) (1.2.0)\n", + "Requirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from cycler>=0.10->Matplotlib>=2.1->object-detection==0.1) (1.12.0)\n", + "Building wheels for collected packages: object-detection\n", + " Building wheel for object-detection (setup.py): started\n", + " Building wheel for object-detection (setup.py): finished with status 'done'\n", + " Created wheel for object-detection: filename=object_detection-0.1-cp36-none-any.whl size=1141324 sha256=1dff68de415a4ccc3af0e20b8f409a73d147d79720a713dcdc30f9bc8d4ab3a2\n", + " Stored in directory: /tmp/pip-ephem-wheel-cache-rlyj8yrw/wheels/94/49/4b/39b051683087a22ef7e80ec52152a27249d1a644ccf4e442ea\n", + "Successfully built object-detection\n", + "Installing collected packages: object-detection\n", + "Successfully installed object-detection-0.1\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "LBdjK2G5ywuc" + }, + "source": [ + "### Imports" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab_type": "code", + "id": "hV4P5gyTWKMI", + "colab": {} + }, + "source": [ + "import numpy as np\n", + "import os\n", + "import six\n", + "import six.moves.urllib as urllib\n", + "import sys\n", + "import tarfile\n", + "import tensorflow as tf\n", + "import zipfile\n", + "import pathlib\n", + "import json\n", + "import datetime\n", + "import matplotlib.pyplot as plt\n", + "\n", + "from collections import defaultdict\n", + "from io import StringIO\n", + "from matplotlib import pyplot as plt\n", + "from PIL import Image\n", + "from IPython.display import display" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "r5FNuiRPWKMN" + }, + "source": [ + "Import the object detection module." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "colab_type": "code", + "id": "4-IMl4b6BdGO", + "colab": {} + }, + "source": [ + "from object_detection.utils import ops as utils_ops\n", + "from object_detection.utils import label_map_util\n", + "from object_detection.utils import visualization_utils as vis_utils" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "RYPCiag2iz_q" + }, + "source": [ + "Patches:" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab_type": "code", + "id": "mF-YlMl8c_bM", + "colab": {} + }, + "source": [ + "# patch tf1 into `utils.ops`\n", + "utils_ops.tf = tf.compat.v1\n", + "\n", + "# Patch the location of gfile\n", + "tf.gfile = tf.io.gfile" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "cfn_tRFOWKMO" + }, + "source": [ + "# Model preparation " + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "7ai8pLZZWKMS" + }, + "source": [ + "## Loader" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab_type": "code", + "id": "zm8xp-0eoItE", + "colab": {} + }, + "source": [ + "def load_model(model_name):\n", + " base_url = 'http://download.tensorflow.org/models/object_detection/'\n", + " model_file = model_name + '.tar.gz'\n", + " model_dir = tf.keras.utils.get_file(\n", + " fname=model_name,\n", + " origin=base_url + model_file,\n", + " untar=True)\n", + "\n", + " model_dir = pathlib.Path(model_dir)/\"saved_model\"\n", + " model = tf.saved_model.load(str(model_dir))\n", + " model = model.signatures['serving_default']\n", + "\n", + " return model" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "_1MVVTcLWKMW" + }, + "source": [ + "## Loading label map\n", + "Label maps map indices to category names, so that when our convolution network predicts `5`, we know that this corresponds to `zebra`. 
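As a concrete illustration of the mapping described above, the category index is just a plain Python dictionary keyed by the integer class id; a minimal hand-built stand-in (the ids and names here are illustrative only) looks like:

```
# Integer class id -> {'id', 'name'} dict, the same shape of mapping that
# create_category_index_from_labelmap returns below.
category_index = {
    1: {'id': 1, 'name': 'zebra'},
    2: {'id': 2, 'name': 'giraffe'},
}
print(category_index[1]['name'])  # zebra
```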
Here we use internal utility functions, but anything that returns a dictionary mapping integers to appropriate string labels would be fine" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab_type": "code", + "id": "hDbpHkiWWKMX", + "colab": {} + }, + "source": [ + "# List of the strings that is used to add correct label for each box.\n", + "PATH_TO_LABELS = 'models/research/object_detection/data/snapshot_serengeti_label_map.pbtxt'\n", + "category_index = label_map_util.create_category_index_from_labelmap(PATH_TO_LABELS, use_display_name=False)" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "oVU3U_J6IJVb" + }, + "source": [ + "We will test on a context group of images from one month at one camera from the Snapshot Serengeti val split defined on [LILA.science](http://lila.science/datasets/snapshot-serengeti), which was not seen during model training:\n", + "\n" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab_type": "code", + "id": "jG-zn5ykWKMd", + "outputId": "c7bbbb2f-0f6e-4380-fd92-c88c088bd766", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 85 + } + }, + "source": [ + "# If you want to test the code with your images, just add path to the images to\n", + "# the TEST_IMAGE_PATHS.\n", + "PATH_TO_TEST_IMAGES_DIR = pathlib.Path('models/research/object_detection/test_images/snapshot_serengeti')\n", + "TEST_IMAGE_PATHS = sorted(list(PATH_TO_TEST_IMAGES_DIR.glob(\"*.jpeg\")))\n", + "TEST_IMAGE_PATHS" + ], + "execution_count": 11, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "[PosixPath('models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0038.jpeg'),\n", + " PosixPath('models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0039.jpeg'),\n", + " PosixPath('models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0040.jpeg'),\n", + " PosixPath('models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0041.jpeg')]" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 11 + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "oBcQzptnQ-x6", + "colab_type": "text" + }, + "source": [ + "Load the metadata for each image" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "ZLLINOHcQ-An", + "colab_type": "code", + "colab": {} + }, + "source": [ + "test_data_json = 'models/research/object_detection/test_images/snapshot_serengeti/context_rcnn_demo_metadata.json'\n", + "with open(test_data_json, 'r') as f:\n", + " test_metadata = json.load(f)" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "BgGTPHhkOAel", + "colab_type": "code", + "outputId": "1421a32a-c208-498f-931f-1bfeb25d6488", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 105 + } + }, + "source": [ + "image_id_to_datetime = {im['id']:im['date_captured'] for im in test_metadata['images']}\n", + "image_path_to_id = {im['file_name']: im['id'] \n", + " for im in test_metadata['images']}\n", + "image_path_to_id" + ], + "execution_count": 13, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "{'models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0038.jpeg': 'S1/E03/E03_R3/S1_E03_R3_PICT0038',\n", + " 'models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0039.jpeg': 'S1/E03/E03_R3/S1_E03_R3_PICT0039',\n", + " 
'models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0040.jpeg': 'S1/E03/E03_R3/S1_E03_R3_PICT0040',\n", + " 'models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0041.jpeg': 'S1/E03/E03_R3/S1_E03_R3_PICT0041'}" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 13 + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "H0_1AGhrWKMc" + }, + "source": [ + "# Generate Context Features for each image" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "kt3_pPQOj7ii", + "colab_type": "code", + "outputId": "fc72e978-f576-43f4-bcf1-3eb49fef5726", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 88 + } + }, + "source": [ + "faster_rcnn_model_name = 'faster_rcnn_resnet101_snapshot_serengeti_2020_06_10'\n", + "faster_rcnn_model = load_model(faster_rcnn_model_name)" + ], + "execution_count": 14, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Downloading data from http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_snapshot_serengeti_2020_06_10.tar.gz\n", + "588832768/588829839 [==============================] - 3s 0us/step\n", + "INFO:tensorflow:Saver not created because there are no variables in the graph to restore\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "k6Clkv_mBo_U", + "colab_type": "text" + }, + "source": [ + "Check the model's input signature, it expects a batch of 3-color images of type uint8." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "H1qNlFESBsTR", + "colab_type": "code", + "outputId": "9b8b84e0-d7a8-4ec9-d6e0-22d574cb6209", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + } + }, + "source": [ + "faster_rcnn_model.inputs" + ], + "execution_count": 15, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "[]" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 15 + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "eYS8KpRCBtBH", + "colab_type": "text" + }, + "source": [ + "And it returns several outputs. Note this model has been exported with additional output 'detection_features' which will be used to build the contextual memory bank." 
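Because the whole memory-bank workflow depends on that extra output, a quick sanity check against the signature (a sketch, using the `output_dtypes` mapping inspected in the next cell and the `faster_rcnn_model` loaded above) can fail fast on a model exported without it:

```
# Verify the exported signature exposes 'detection_features' before trying to
# build the contextual memory bank from it.
assert 'detection_features' in faster_rcnn_model.output_dtypes, (
    'This SavedModel was not exported with detection_features; re-export it '
    'with that additional output before building the memory bank.')
```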
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "5M-1yxgfkmQl", + "colab_type": "code", + "outputId": "1da98c3b-79c5-4d19-d64c-3e9dbadc97c0", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 153 + } + }, + "source": [ + "faster_rcnn_model.output_dtypes" + ], + "execution_count": 16, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "{'detection_boxes': tf.float32,\n", + " 'detection_classes': tf.float32,\n", + " 'detection_features': tf.float32,\n", + " 'detection_multiclass_scores': tf.float32,\n", + " 'detection_scores': tf.float32,\n", + " 'num_detections': tf.float32,\n", + " 'raw_detection_boxes': tf.float32,\n", + " 'raw_detection_scores': tf.float32}" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 16 + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "zVjNFFNIDCst", + "colab_type": "code", + "outputId": "edb46db0-05fb-4952-bc88-db09d7811b01", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 153 + } + }, + "source": [ + "faster_rcnn_model.output_shapes" + ], + "execution_count": 17, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "{'detection_boxes': TensorShape([None, 300, 4]),\n", + " 'detection_classes': TensorShape([None, 300]),\n", + " 'detection_features': TensorShape([None, None, None, None, None]),\n", + " 'detection_multiclass_scores': TensorShape([None, 300, 49]),\n", + " 'detection_scores': TensorShape([None, 300]),\n", + " 'num_detections': TensorShape([None]),\n", + " 'raw_detection_boxes': TensorShape([None, 300, 4]),\n", + " 'raw_detection_scores': TensorShape([None, 300, 49])}" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 17 + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "JP5qZ7sXJpwG" + }, + "source": [ + "Add a wrapper function to call the model, and cleanup the outputs:" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab_type": "code", + "id": "ajmR_exWyN76", + "colab": {} + }, + "source": [ + "def run_inference_for_single_image(model, image):\n", + " '''Run single image through tensorflow object detection saved_model.\n", + "\n", + " This function runs a saved_model on a (single) provided image and returns\n", + " inference results in numpy arrays.\n", + "\n", + " Args:\n", + " model: tensorflow saved_model. 
This model can be obtained using \n", + " export_inference_graph.py.\n", + " image: uint8 numpy array with shape (img_height, img_width, 3)\n", + "\n", + " Returns:\n", + " output_dict: a dictionary holding the following entries:\n", + " `num_detections`: an integer\n", + " `detection_boxes`: a numpy (float32) array of shape [N, 4]\n", + " `detection_classes`: a numpy (uint8) array of shape [N]\n", + " `detection_scores`: a numpy (float32) array of shape [N]\n", + " `detection_features`: a numpy (float32) array of shape [N, 7, 7, 2048]\n", + " '''\n", + " image = np.asarray(image)\n", + " # The input needs to be a tensor, convert it using `tf.convert_to_tensor`.\n", + " input_tensor = tf.convert_to_tensor(image)\n", + " # The model expects a batch of images, so add an axis with `tf.newaxis`.\n", + " input_tensor = input_tensor[tf.newaxis,...]\n", + "\n", + " # Run inference\n", + " output_dict = model(input_tensor)\n", + " # All outputs are batches tensors.\n", + " # Convert to numpy arrays, and take index [0] to remove the batch dimension.\n", + " # We're only interested in the first num_detections.\n", + " num_dets = output_dict.pop('num_detections')\n", + " num_detections = int(num_dets)\n", + " for key,value in output_dict.items():\n", + " output_dict[key] = value[0, :num_detections].numpy() \n", + " output_dict['num_detections'] = num_detections\n", + "\n", + " # detection_classes should be ints.\n", + " output_dict['detection_classes'] = output_dict['detection_classes'].astype(\n", + " np.int64)\n", + " return output_dict" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "un5SXxIxMaaV", + "colab_type": "text" + }, + "source": [ + "Functions for embedding context features" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "qvtvAZFDMoTM", + "colab_type": "code", + "colab": {} + }, + "source": [ + "def embed_date_captured(date_captured):\n", + " \"\"\"Encodes the datetime of the image.\n", + "\n", + " Takes a datetime object and encodes it into a normalized embedding of shape \n", + " [5], using hard-coded normalization factors for year, month, day, hour,\n", + " minute.\n", + "\n", + " Args:\n", + " date_captured: A datetime object.\n", + "\n", + " Returns:\n", + " A numpy float32 embedding of shape [5].\n", + " \"\"\"\n", + " embedded_date_captured = []\n", + " month_max = 12.0\n", + " day_max = 31.0\n", + " hour_max = 24.0\n", + " minute_max = 60.0\n", + " min_year = 1990.0\n", + " max_year = 2030.0\n", + "\n", + " year = (date_captured.year-min_year)/float(max_year-min_year)\n", + " embedded_date_captured.append(year)\n", + "\n", + " month = (date_captured.month-1)/month_max\n", + " embedded_date_captured.append(month)\n", + "\n", + " day = (date_captured.day-1)/day_max\n", + " embedded_date_captured.append(day)\n", + "\n", + " hour = date_captured.hour/hour_max\n", + " embedded_date_captured.append(hour)\n", + "\n", + " minute = date_captured.minute/minute_max\n", + " embedded_date_captured.append(minute)\n", + "\n", + " return np.asarray(embedded_date_captured)" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "xN8k5daOOA7b", + "colab_type": "code", + "colab": {} + }, + "source": [ + "def embed_position_and_size(box):\n", + " \"\"\"Encodes the bounding box of the object of interest.\n", + "\n", + " Takes a bounding box and encodes it into a normalized embedding of shape \n", + " [4] - the center point (x,y) and width and height of the box.\n", + "\n", + " Args:\n", 
+ " box: A bounding box, formatted as [ymin, xmin, ymax, xmax].\n", + "\n", + " Returns:\n", + " A numpy float32 embedding of shape [4].\n", + " \"\"\"\n", + " ymin = box[0]\n", + " xmin = box[1]\n", + " ymax = box[2]\n", + " xmax = box[3]\n", + " w = xmax - xmin\n", + " h = ymax - ymin\n", + " x = xmin + w / 2.0\n", + " y = ymin + h / 2.0\n", + " return np.asarray([x, y, w, h])" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "lJe2qy8HPc6Z", + "colab_type": "code", + "colab": {} + }, + "source": [ + "def get_context_feature_embedding(date_captured, detection_boxes,\n", + " detection_features, detection_scores):\n", + " \"\"\"Extracts representative feature embedding for a given input image.\n", + "\n", + " Takes outputs of a detection model and focuses on the highest-confidence\n", + " detected object. Starts with detection_features and uses average pooling to\n", + " remove the spatial dimensions, then appends an embedding of the box position\n", + " and size, and an embedding of the date and time the image was captured,\n", + " returning a one-dimensional representation of the object.\n", + "\n", + " Args:\n", + " date_captured: A datetime string of format '%Y-%m-%d %H:%M:%S'.\n", + " detection_features: A numpy (float32) array of shape [N, 7, 7, 2048].\n", + " detection_boxes: A numpy (float32) array of shape [N, 4].\n", + " detection_scores: A numpy (float32) array of shape [N].\n", + "\n", + " Returns:\n", + " A numpy float32 embedding of shape [2057].\n", + " \"\"\"\n", + " date_captured = datetime.datetime.strptime(date_captured,'%Y-%m-%d %H:%M:%S')\n", + " temporal_embedding = embed_date_captured(date_captured)\n", + " embedding = detection_features[0]\n", + " pooled_embedding = np.mean(np.mean(embedding, axis=1), axis=0)\n", + " box = detection_boxes[0]\n", + " position_embedding = embed_position_and_size(box)\n", + " bb_embedding = np.concatenate((pooled_embedding, position_embedding))\n", + " embedding = np.expand_dims(np.concatenate((bb_embedding,temporal_embedding)),\n", + " axis=0)\n", + " score = detection_scores[0]\n", + " return embedding, score" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "z1wq0LVyMRR_" + }, + "source": [ + "Run it on each test image and use the output detection features and metadata to build up a context feature bank:" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab_type": "code", + "id": "DWh_1zz6aqxs", + "colab": {} + }, + "source": [ + "def run_inference(model, image_path, date_captured, resize_image=True):\n", + " \"\"\"Runs inference over a single input image and extracts contextual features.\n", + "\n", + " Args:\n", + " model: A tensorflow saved_model object.\n", + " image_path: Absolute path to the input image.\n", + " date_captured: A datetime string of format '%Y-%m-%d %H:%M:%S'.\n", + " resize_image: Whether to resize the input image before running inference.\n", + "\n", + " Returns:\n", + " context_feature: A numpy float32 array of shape [2057].\n", + " score: A numpy float32 object score for the embedded object.\n", + " output_dict: The saved_model output dictionary for the image.\n", + " \"\"\"\n", + " with open(image_path,'rb') as f:\n", + " image = Image.open(f)\n", + " if resize_image:\n", + " image.thumbnail((640,640),Image.ANTIALIAS)\n", + " image_np = np.array(image)\n", + "\n", + " # Actual detection.\n", + " output_dict = run_inference_for_single_image(model, image_np)\n", + "\n", + 
" context_feature, score = get_context_feature_embedding(\n", + " date_captured, output_dict['detection_boxes'],\n", + " output_dict['detection_features'], output_dict['detection_scores'])\n", + " return context_feature, score, output_dict" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "colab_type": "code", + "id": "3a5wMHN8WKMh", + "colab": {} + }, + "source": [ + "context_features = []\n", + "scores = []\n", + "faster_rcnn_results = {}\n", + "for image_path in TEST_IMAGE_PATHS:\n", + " image_id = image_path_to_id[str(image_path)]\n", + " date_captured = image_id_to_datetime[image_id]\n", + " context_feature, score, results = run_inference(\n", + " faster_rcnn_model, image_path, date_captured)\n", + " faster_rcnn_results[image_id] = results\n", + " context_features.append(context_feature)\n", + " scores.append(score)\n", + "\n", + "# Concatenate all extracted context embeddings into a contextual memory bank.\n", + "context_features_matrix = np.concatenate(context_features, axis=0)\n" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "DsspMPX3Cssg" + }, + "source": [ + "## Run Detection With Context" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "f7aOtOlebK7h" + }, + "source": [ + "Load a context r-cnn object detection model:" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab_type": "code", + "id": "1XNT0wxybKR6", + "outputId": "cc5b0677-cf16-46c2-9ae5-32681725f856", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 88 + } + }, + "source": [ + "context_rcnn_model_name = 'context_rcnn_resnet101_snapshot_serengeti_2020_06_10'\n", + "context_rcnn_model = load_model(context_rcnn_model_name)\n" + ], + "execution_count": 24, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Downloading data from http://download.tensorflow.org/models/object_detection/context_rcnn_resnet101_snapshot_serengeti_2020_06_10.tar.gz\n", + "724664320/724658931 [==============================] - 3s 0us/step\n", + "INFO:tensorflow:Saver not created because there are no variables in the graph to restore\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "G6IGGtGqBH6y", + "colab_type": "text" + }, + "source": [ + "We need to define the expected context padding size for the\n", + "model, this must match the definition in the model config (max_num_context_features)." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "4oh9XNLBjkTL", + "colab_type": "code", + "colab": {} + }, + "source": [ + "context_padding_size = 2000" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "yN1AYfAEJIGp" + }, + "source": [ + "Check the model's input signature, it expects a batch of 3-color images of type uint8, plus context_features padded to the maximum context feature size for this model (2000) and valid_context_size to represent the non-padded context features: " + ] + }, + { + "cell_type": "code", + "metadata": { + "colab_type": "code", + "id": "CK4cnry6wsHY", + "outputId": "d77af014-769f-4e20-b4ac-bfdd40502128", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 68 + } + }, + "source": [ + "context_rcnn_model.inputs" + ], + "execution_count": 26, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "[,\n", + " ,\n", + " ]" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 26 + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "Q8u3BjpMJXZF" + }, + "source": [ + "And returns several outputs:" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab_type": "code", + "id": "oLSZpfaYwuSk", + "outputId": "63a3903f-529b-41f9-b742-9b81c4c5e096", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 136 + } + }, + "source": [ + "context_rcnn_model.output_dtypes" + ], + "execution_count": 27, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "{'detection_boxes': tf.float32,\n", + " 'detection_classes': tf.float32,\n", + " 'detection_multiclass_scores': tf.float32,\n", + " 'detection_scores': tf.float32,\n", + " 'num_detections': tf.float32,\n", + " 'raw_detection_boxes': tf.float32,\n", + " 'raw_detection_scores': tf.float32}" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 27 + } + ] + }, + { + "cell_type": "code", + "metadata": { + "colab_type": "code", + "id": "FZyKUJeuxvpT", + "outputId": "d2feeaba-2bb2-4779-a96a-94a8a0aff362", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 136 + } + }, + "source": [ + "context_rcnn_model.output_shapes" + ], + "execution_count": 28, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "{'detection_boxes': TensorShape([1, 300, 4]),\n", + " 'detection_classes': TensorShape([1, 300]),\n", + " 'detection_multiclass_scores': TensorShape([1, 300, 49]),\n", + " 'detection_scores': TensorShape([1, 300]),\n", + " 'num_detections': TensorShape([1]),\n", + " 'raw_detection_boxes': TensorShape([1, 300, 4]),\n", + " 'raw_detection_scores': TensorShape([1, 300, 49])}" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 28 + } + ] + }, + { + "cell_type": "code", + "metadata": { + "colab_type": "code", + "id": "CzkVv_n2MxKC", + "colab": {} + }, + "source": [ + "def run_context_rcnn_inference_for_single_image(\n", + " model, image, context_features, context_padding_size):\n", + " '''Run single image through a Context R-CNN saved_model.\n", + "\n", + " This function runs a saved_model on a (single) provided image and provided \n", + " contextual features and returns inference results in numpy arrays.\n", + "\n", + " Args:\n", + " model: tensorflow Context R-CNN saved_model. This model can be obtained\n", + " using export_inference_graph.py and setting side_input fields. 
\n", + " Example export call - \n", + " python export_inference_graph.py \\\n", + " --input_type image_tensor \\\n", + " --pipeline_config_path /path/to/context_rcnn_model.config \\\n", + " --trained_checkpoint_prefix /path/to/context_rcnn_model.ckpt \\\n", + " --output_directory /path/to/output_dir \\\n", + " --use_side_inputs True \\\n", + " --side_input_shapes 1,2000,2057/1 \\\n", + " --side_input_names context_features,valid_context_size \\\n", + " --side_input_types float,int \\\n", + " --input_shape 1,-1,-1,3\n", + "\n", + " image: uint8 numpy array with shape (img_height, img_width, 3)\n", + " context_features: A numpy float32 contextual memory bank of shape \n", + " [num_context_examples, 2057]\n", + " context_padding_size: The amount of expected padding in the contextual\n", + " memory bank, defined in the Context R-CNN config as \n", + " max_num_context_features.\n", + "\n", + " Returns:\n", + " output_dict: a dictionary holding the following entries:\n", + " `num_detections`: an integer\n", + " `detection_boxes`: a numpy (float32) array of shape [N, 4]\n", + " `detection_classes`: a numpy (uint8) array of shape [N]\n", + " `detection_scores`: a numpy (float32) array of shape [N]\n", + " '''\n", + " image = np.asarray(image)\n", + " # The input image needs to be a tensor, convert it using \n", + " # `tf.convert_to_tensor`.\n", + " image_tensor = tf.convert_to_tensor(\n", + " image, name='image_tensor')[tf.newaxis,...]\n", + "\n", + " context_features = np.asarray(context_features)\n", + " valid_context_size = context_features.shape[0]\n", + " valid_context_size_tensor = tf.convert_to_tensor(\n", + " valid_context_size, name='valid_context_size')[tf.newaxis,...]\n", + " padded_context_features = np.pad(\n", + " context_features,\n", + " ((0,context_padding_size-valid_context_size),(0,0)), mode='constant')\n", + " padded_context_features_tensor = tf.convert_to_tensor(\n", + " padded_context_features,\n", + " name='context_features',\n", + " dtype=tf.float32)[tf.newaxis,...]\n", + "\n", + " # Run inference\n", + " output_dict = model(\n", + " inputs=image_tensor,\n", + " context_features=padded_context_features_tensor,\n", + " valid_context_size=valid_context_size_tensor)\n", + " # All outputs are batches tensors.\n", + " # Convert to numpy arrays, and take index [0] to remove the batch dimension.\n", + " # We're only interested in the first num_detections.\n", + " num_dets = output_dict.pop('num_detections')\n", + " num_detections = int(num_dets)\n", + " for key,value in output_dict.items():\n", + " output_dict[key] = value[0, :num_detections].numpy() \n", + " output_dict['num_detections'] = num_detections\n", + "\n", + " # detection_classes should be ints.\n", + " output_dict['detection_classes'] = output_dict['detection_classes'].astype(np.int64)\n", + " return output_dict" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "0FqVkR3Agc6U", + "colab_type": "code", + "colab": {} + }, + "source": [ + "def show_context_rcnn_inference(\n", + " model, image_path, context_features, faster_rcnn_output_dict,\n", + " context_padding_size, resize_image=True):\n", + " \"\"\"Runs inference over a single input image and visualizes Faster R-CNN vs. 
\n", + " Context R-CNN results.\n", + "\n", + " Args:\n", + " model: A tensorflow saved_model object.\n", + " image_path: Absolute path to the input image.\n", + " context_features: A numpy float32 contextual memory bank of shape \n", + " [num_context_examples, 2057]\n", + " faster_rcnn_output_dict: The output_dict corresponding to this input image\n", + " from the single-frame Faster R-CNN model, which was previously used to\n", + " build the memory bank.\n", + " context_padding_size: The amount of expected padding in the contextual\n", + " memory bank, defined in the Context R-CNN config as \n", + " max_num_context_features.\n", + " resize_image: Whether to resize the input image before running inference.\n", + "\n", + " Returns:\n", + " context_rcnn_image_np: Numpy image array showing Context R-CNN Results.\n", + " faster_rcnn_image_np: Numpy image array showing Faster R-CNN Results.\n", + " \"\"\"\n", + "\n", + " # the array based representation of the image will be used later in order to prepare the\n", + " # result image with boxes and labels on it.\n", + " with open(image_path,'rb') as f:\n", + " image = Image.open(f)\n", + " if resize_image:\n", + " image.thumbnail((640,640),Image.ANTIALIAS)\n", + " image_np = np.array(image)\n", + " image.thumbnail((400,400),Image.ANTIALIAS)\n", + " context_rcnn_image_np = np.array(image)\n", + " \n", + " faster_rcnn_image_np = np.copy(context_rcnn_image_np)\n", + "\n", + " # Actual detection.\n", + " output_dict = run_context_rcnn_inference_for_single_image(\n", + " model, image_np, context_features, context_padding_size)\n", + "\n", + " # Visualization of the results of a context_rcnn detection.\n", + " vis_utils.visualize_boxes_and_labels_on_image_array(\n", + " context_rcnn_image_np,\n", + " output_dict['detection_boxes'],\n", + " output_dict['detection_classes'],\n", + " output_dict['detection_scores'],\n", + " category_index,\n", + " use_normalized_coordinates=True,\n", + " line_thickness=2)\n", + " \n", + " # Visualization of the results of a faster_rcnn detection.\n", + " vis_utils.visualize_boxes_and_labels_on_image_array(\n", + " faster_rcnn_image_np,\n", + " faster_rcnn_output_dict['detection_boxes'],\n", + " faster_rcnn_output_dict['detection_classes'],\n", + " faster_rcnn_output_dict['detection_scores'],\n", + " category_index,\n", + " use_normalized_coordinates=True,\n", + " line_thickness=2)\n", + " return context_rcnn_image_np, faster_rcnn_image_np" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "3cYa2B8uAYx0", + "colab_type": "text" + }, + "source": [ + "Define Matplotlib parameters for pretty visualizations" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "9F8okR1uAQ0T", + "colab_type": "code", + "colab": {} + }, + "source": [ + "%matplotlib inline\n", + "plt.rcParams['axes.grid'] = False\n", + "plt.rcParams['xtick.labelsize'] = False\n", + "plt.rcParams['ytick.labelsize'] = False\n", + "plt.rcParams['xtick.top'] = False\n", + "plt.rcParams['xtick.bottom'] = False\n", + "plt.rcParams['ytick.left'] = False\n", + "plt.rcParams['ytick.right'] = False\n", + "plt.rcParams['figure.figsize'] = [15,10]" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "YGj7nXXQAaQ7", + "colab_type": "text" + }, + "source": [ + "Run Context R-CNN inference and compare results to Faster R-CNN" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab_type": "code", + "id": "vQ2Sj2VIOZLA", + "outputId": 
"1c043894-09e5-4c9f-a99d-ae21d6e72d0c", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + } + }, + "source": [ + "for image_path in TEST_IMAGE_PATHS:\n", + " image_id = image_path_to_id[str(image_path)]\n", + " faster_rcnn_output_dict = faster_rcnn_results[image_id]\n", + " context_rcnn_image, faster_rcnn_image = show_context_rcnn_inference(\n", + " context_rcnn_model, image_path, context_features_matrix,\n", + " faster_rcnn_output_dict, context_padding_size)\n", + " plt.subplot(1,2,1)\n", + " plt.imshow(faster_rcnn_image)\n", + " plt.title('Faster R-CNN')\n", + " plt.subplot(1,2,2)\n", + " plt.imshow(context_rcnn_image)\n", + " plt.title('Context R-CNN')\n", + " plt.show()" + ], + "execution_count": 32, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA1wAAAFDCAYAAAAu+g+jAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOx9d5xsWVnt2tXVfe8dYJhAGEYFRAxk8JFBER9ZUBAFFRQkCKiAJAcQRMkgUQUlKAxZFCQHA2EMAyoITwUe8BjCEGeGGSbduV3Vtd8fp77qVavWPtV9b0+4t7/v9+tfV52zw5fXPt/Z51SptSIpKSkpKSkpKSkpKSlp52lwaTOQlJSUlJSUlJSUlJR0pFJecCUlJSUlJSUlJSUlJV1MlBdcSUlJSUlJSUlJSUlJFxPlBVdSUlJSUlJSUlJSUtLFRHnBlZSUlJSUlJSUlJSUdDFRXnAlJSUlJSUlJSUlJSVdTJQXXElJSUlJSUlJSUlJSRcT5QVX0mWaSilfLqXsL6WcT38nHuRYDyyl/PMO8/fAUsrGlK9zSymfLqXcfUmfo0spLymlfHXa7/9Nv19pev7LpZTvlFIuR30eUkr5CH2vpZT/KqUM6NgzSymv3Un5kpKSkpIuG1RK+ZVSyn9MceObpZT3l1JuuwPjvraU8swd4nHpWFP8umAqx9dLKS8qpaws6dOUvZTyB9Mx70Pth9Nj1yS+ainl5tTm2qWU/DHapEuE8oIr6XCge9RaL09/37g0mCilDBunTq21Xh7AMQBeDuAtpZRjGmOsAfhHANcDcBcARwO4FYCzANycmq4AePQSlk4E8EtbFiApKSkp6bCkUspjAbwEwLMBXBXA1dHhzc9dmnwdAt1oipu3A3BfAA9qNdyi7N8F8IdLLty+C2BHLiyTkrZLecGVdNhRKeXYUsp7SilnlFLOnn7+fjr/wFLKl0op55VSTiul3K+Uch0Afw7gVtMK2TnTtntKKS+Y3m36dinlz0sp+6bnfqqUcnop5aRSyrcAvKaPr1rrBMDrAVwOwA83mv0aOrC4V631M7XWSa31O7XWZ9Ra30ft/gjA41sXblN6PjqAaV0IJiUlJSUd5lRKuSKApwP4rVrr22utF9RaR7XWd9danzBts2e6U+Ib07+XlFL2TM8Flj1uunvim6WUX5+e+w0A9wPwu1NsfPf0+ImllLdNcfa0UsqjpsePm451j+n3y5dSvlhK+bXWWH1Ua/0igH8BcOODlX1KHwCwDuD+PdOdDOCGpZTbLeMrKWmnKS+4kg5HGqC7+LkGuouX/QD+FACm2/D+GMBda61XAHBrAJ+qtX4WwMMxvRtVa40LmecC+BF0yf7aAL4PwO/TXCcAOG4612/0MTWtrP06gBGArzSa3QHAB2qt5y+R8T8AfATA43vavB3AuQAeuGSspKSkpKTDl24FYC+Av+1p83sAbokOy26EbsfEU+j8CQCuiA7jHgzgZaWUY2utrwTwRgDPn2LjPaZb1d8N4NPT9v8bwO+UUu5ca/0uurtRryqlXAXAi9Fh7OvcWMsEK6X8GICfAPDFQ5AdACqApwJ4WilltdHmQnR3yZ61jK+kpJ2mvOBKOhzoHaWUc6Z/76i1nlVrfVut9cJa63nokidXrCYArl9K2Vdr/Wat9X/coKWUgu4i6jG11u9Ox3o25rfpTQA8rdZ6oNa6v8HfLad3zC4C8AIA96+1fqfR9ngA39yi3L8P4JGllCs3zgfAPHW6VTEpKSkp6cij4wGcWWsd97S5H4CnT3dMnAHgDwH8Kp0fTc+Pprspzgfwo42xbgbgyrXWp9da12utXwLwKkyxsdb6dwD+Gt32+LsBeNhByPTJUsoFAD6Lrrj48ka7rciOKV/vAnAGgIf0NHsFgKuXUu66PXaTkg6N8oIr6XCge9Zaj5n+3bOUclQp5RWllK+UUs4FcAqAY0opK7XWC9DtB384gG+WUt47raA5ujKAowB8Ii7o0G1L4AucM2qtFy3h72PTO2bHAngXumodSilXL/Syj2nbswBcbStC11r/G8B7ADyxp837AJyOgwO8pKSkpKTLPp0F4EpLto+fiPmdFV+ZHpuNIRctFwK4fGOsawA4kQqd5wB4Mrrnp4JeCeD6AF5baz1ri3Iw/fh0/vsCuAW6rfiYvgwjcPN+2JrsTE9Bd7dvrztZaz0A4BnTv6SkS4zygivpcKTHoavM3aLWejSAn5weLwBQa/1grfWO6C5sPoeuMgd0d4SYzkS3HfF6dEF3xemDvGj0adJ0m+AjAPxqKeUmtdav8ss+ps3+AcCdC72BcAk9DcBD0W3raNHvoQPDo7bKa1JSUlLSYUOnAjgA4J49bb6B7kIp6OrTY1shxbmvATiNcPGYWusVaq13A2bb518J4HUAfrOUcu2esdqTdvRWdPL9/vTYXQk334ityc5j/j267Ym/2dPsNehecvXzW+U1KelQKS+4kg5HugK6C6VzSinHobsoAQCUUq5aSvm56QXNAXTbJibT098G8P2x/W76kotXAXjxdC86SinfV0q588EyNt3f/mrMPwfG9Hp0YPa2UsqPlVIGpZTjSylPLqXczYz3RQB/BeBRPXN+BMB/A3jAwfKdlJSUlHTZpFrr99BhystKKbHLY7WUctdSyvOnzd4M4CmllCuX7idGfh/AG7Y4xbcBXIu+/xuA86YvjNpXSlkppVy/lHKz6fkno7uwehC6Fz
7iDT2UEuVZJY71DfXOwQZ12mqa0SyeM3s6Rp7L2LZvB7laS2To/L0N6J7l10ViMs6zS13cEGdmlmVX2wqzS1uqosaRobiJxJ3Omr7BszS5hMpLbK9vIdH3xg0DTWbxprssxeH8ip3+ubJWY5QJIkljeN9SQVoElTm1SVFVlmo6qy/1YUluZDq9PEXu6K0KYHo+xqjMG0dntSsFTQ6fV6Np/Pw4SE1I3VamVNc3w2Av9iguCJoq73zz+QTkIkMfjBgSzQG9FW/G0ymdhsNrPJZBKImIeAN5uN/fLLL3Z7exvSHZj8EP3WiR/4tlqtQtoQvqx2gP8TfVcQ1okj+EfEk75BJqoHjepT8EfIG8LyBOhTHXQir36iBKVtKMGAZV35/NJx5PfhSOSMrzPuXq/X+h2fYRLGjQ39TISP/uF/+d8srQqz5oDTiePIJMKR1pg1Bx1bY01dh5fmmu25Mjm85iVN0ihHpgf8r+va0iTdn3+AAniW8dRVbY0dbjQbs/LvjnJpmn1fev1eaLtuamvqwyrroR91Ve/HVh828rDGmgO3W2NWN3Xgdvgt2RN/6FeSJmF+UTdtjkyTwyppXVmWplbVdZBRVVctvNINwsKxw1xC5xlZevCBqtzL0Y433GVx2JCm3wv9Qe5N3VhVV2Gegg/Sv6Io7P/Oc0uT1Hb/w86a9LhTIyuEGgDA7juOvEyO/O96w2V2TE/Qu08a1jt8jXwAsv7OlvQAjIQ6EABGpQNjiZsHLbkeYCWawR0+USSExfmMgwihph/M5/OwbA4pqBMRkcE5dOxmx7tnlskx6CzLwrsgrq6uggERKVIi1PQSdDr8/evnUOF3KYn7HJvK6TmHexDF9NY1nPv4rQvxY2Zm5Ynj/nyflXsqS5e+Ua/WX8k5MZc8d4z+ZJFzvRPrwnYS+T2Tv3xuIufFyjDPbddrby2ruxvd3NxYmqb26tUre/fuXXiR5Xa7DekU+MJisWiBY7/fD9FMoq28KJKiz1MMBoNQP30hVWW9Xtt0Og3pSHd3d2GLXQiOrWt5XoJIZ13XIcILhoA/WZaF6DKkSJ/YJMBH9jTYAQ4R3dfJJpikMuF8sERXG8yOE3ewleN5noeJtJKTTrwgIg2yaGRP+0X7bNzQlS8rHUd+e46kv0TSWU1L0zTsJMjzKqSn8dwKKVYhze4w8Vte//jtjKYr/2nKPHIszbLwnBo+yUoTONlx5OVxpK7O+fLkGy4GQO4sxqLLwXSSu8SyLEPuLxECs70xsWTaNE3YtWW5XIb8dB6URHhFUYS7eiKAZu2IIZ+plzthTT8gmrharUIkbT6fh8gFD9LtdrtAaIwR59KlR+7geWP4fD4PxHa8aUrCzjoomPc/MD5IFlmRw/rL//F/WX8w2G+H2u+bNY3dP9wfDDw/EO/QsjSz2dXsEGEs7P3732w4HFk+GFhyIJbisNtNfRjPcJjberUOOfcsvRdlaVm6fylfY7Z/aLrXs7Iq7erq2nq9XtjmtK4qG0/Gltjh4Ubbr8CMRiPb7nZWFoXNrma23e6sqkqbTvbOu9lurGmOL5/bbrf7nZUOW/WORiNbHyI8TV1bdnDe6hANyXqZNfWemFfrtdWH9Irtbmv1YSVwu9uF3Xvevn1rvV5mk8nUBoO+NY3Z/OHBBnlu0+nEhvnQ3n94b3XdhBz45WFjkeFoZGb7aF14QLkqrWnMRqOhPTw82HK5sqauraprGw2Htit2VhSlrdcrK3aFNWaWARJmlqbHSVFV1VZVpQ36A9sVO7u/fwhRryxN7er6ysqytFcvX5mZ2ce7uyCXXVFYWRbW7x1y4Q/2OR6NbL5Y2NXVlQ0GA1suFnufSo7bxC4WC8vS1Hr9npXFPqpVVpXl+cDqzR4A8TV84Pr6upWmsVwug00XRRFy3bEnUj6Kogg7mJHbv16vbblc2s3NjSVJYnd3d7ZarcLuUxoZJC2C6FpVVeFdOK9fv7bNZmNpun9QuNfrBT82O0bNmACORqPgYzw/AZEC6OALmKMTaCZvyBHsAMhJVdIHy4lmssmAYhOkB4YxYWcSToSxruvQpkbmwFwf/UdnEDD4QoHkOFcJpytfXjqO/D4cyaYCi8UibBpwf38fngHS3c24yWQHOW7W+Ps//8P/advyGAXPh0Nbr1dHjkwzG+QDK4vS0iy18Wjf5z0/96wsK7s+pIHBkVVd2WQ8CX1HZ6PxyHbbXXg+brvdWllVNj1wFjjiOXItHMkqCPJJksTKA5dkvZ41TW1Xs/3z2lV94Mjtzup6v+qw27Y5Muv1bDqZ2GCw39784eHB8jy3yXRqwzy39x8+WFPXgSMXi4UlaWKj4SjorixLm0wnVpWHTS9GI3t4eLDVYUMYVizZAZJUR7PDCl/N6lZqxW5nWa9ndVVZWVU26PdtVxT2cH8vO0umdn21DwC8fPXSzMzuPu45sm5qK3ZF8MWi3K9a4lML4cjFYmHD0dASa3NkL2m/V44gA3zUcWTHkb48yw2XbtfI4HAys3a+Jh3H4DAslKxv4t5sNuE9EDxUeHd3F87TAQLcLBNqbqUuy3JHjuABX4AEZdJ38tqHw6GtVqsQXaA+riFvvWma4HhJkth8PrfXr19bnufhAVSMcbfb2YcPH8LuQ7r8m2VZ68FHriGvuFnvLG1SW3+4D1GLq94+z7tvme3KypbvPu6NZL0LkYxX4/0kvV5t9zdQo0P6RLK/2ZnP57YhIjAaWdk0Nr25sc3DUVfp9vCOkm1paWWWVZVtPz5YOhpZvVrb9hAlucnHliRmSb1PP8kqs3q5sdUhP7pfNFZuS9uuVrYr91vW9svDsm6xteFgYE3Z2KBsLEv7tix29vDwW0i3ybLMfvjxxz2BlY0Vm6UNDy+fHFlm5Xq9f9fHtrRJ2rer2z3BrJvU+k1q1ba02+HENpuNzd/8Fh5G7hW1DbLGBqXZdj23frF39mq4j34WD/uI0rDZ33zeXN/YmzdvLK8Ty6rE/vznP4dJ02g0svKQvlMvN5ZUlSVFYZOkb4tie7Czvb2XB0Cd5rkt53t5v3rxwsqytMWmsNvhJADNdDyxzf1yP1F5+37//pUP90fbbhqzqrZitX/B5WCwf6nju3fvbJim1iw31hS13Y72D3J//PjRNofta+vl2tJezzbVIa+9qqwuS7PKrJemVpodb34PD4+/fv06gBwTJgCx1+uFiLGmUdFXMwuTS8B2uVzar7/+anmehx2jFotFmJwBcmxJTMF/2OQD3+RZAk0bUrwwsxCJY4IKSbGKAMEQWSe6yAPQRP118wImoOBO0zQBC5bLpY1GI7s73CgzKSH9g755wIfQkKOmQUB64JimSyj5K8Yp7mlahf5GFLBLKfzy0nHk9+FIbpaKogj+TmoU8mQSx8rWaDSyH3/8sYUTZVnaJMts2k9ss9nafD633Tyx/uH8ptlvsLBZPdgwy6yf9S2vD5s81FvrVdU+ZW3x0fLRyKrtysrl0pKmsevJHluSQyQ9SytLtkvb3t/bdDi0KytsXW9ttV5ZL9lPfnM7vKbjkIa2TkobJ6VN89SWZWHrDw9WkZK
aZfbj7YEjq9LK9d5ePnz8YP2eWble2+ubGyvLrV0NUru6ut1zZNZYv9dYVW2tP9unlm0+vLH0wJFmhQ2TzKZJacVybddW2Ga3scFm/3zadr1/rmuU7W+Qb26u7c2bNzZLayvTyv785z/bwuFwnueW7JbWryorysImg8QWu/3E1urjLnf9ft9u8tyWy7llWWYvbg8cWW1sNhseOXI6ts3q3saDgVUf3trNzY3dzz8Eu8ubxqqmsnK9stxx5Isstf5uaQMr7OZqFDiyYIv37f69X9uq/Vw9E3/spuPIy+NIromVJ99wAXaAPXeldFiX6RAsOaa6vMq5kAlgy4Oa3LlzB43QmHizlLvZbMKOLUSxMBwMgJx2lIPB8mZuSIelVoxHH3RUMkLhjJsoATvp6AsBs2z/QOarV69ClIKlW+7iieJBIkQYiPgQjeBhZM05J3ownU7D7kA8aFyW+zeQk2PPmMbjcdj9aDab2XQ6tX//938P73MgcjYY7N8pofUSITTbO+54PG5tqwuQMC5eDInRrtfrsMxMfwArokI8BFrXtd3e3trd3V2oi3dm1HVtv/zyi1VVZa9evQrtEhnabrdhwqEP7w4Gg9C/N2/e2G63s59//tnu7/c3stPp1FarVYi8JUliV1dXZmZBrsjn/fv3Ybl9NpvZbrezt2/fBlDZrx4O7erqKqTL8GJGorjYAw/A/vjjj7Zer0MkkwmW5m7zcsGffvrJ/vSnP4WoEb6SZZn98MMPtjus7JlZAG7sCZDmZpYomeY56/I+edvY9cePH0PEiAja5BAR1QfLIZjVahXy1pEtOsev9RiEALjxDpfZbBYmT7rjErpmInp/f98aAy8pXa/XoS+bzSakfkAM5KuT9gQgM3aiqeAV+IKcvO6xeUgObEM2ZhbwAJl53KJoGgP9oy10pn+J4ClGc76vC//U38DxrnxZ6Tiy48iOIzuO7DjyMjjyXHmWbeFRNh3X5Tq94+c87j4BosViEUDbzMJL17wx9Xq9AM4QFFEsgJ/VHhSOMQN45KLSd66DKHSZPkn2O+mYWdiVJ01T+/DhQzDu29tbe//+/V6YveOWliiOupEDeeOAObnjRAfY5agoirCNLHf0P/zwQ9hFhqV2HIc7a+TO9p6QbFmW9vDwYOv1Oiwxm1nY/tU7y6tXrwKoqQMkyXHXI30Ikd1vdEmcyRnPBZA3T14wEwK2mkUOuq1umqa2XC73y/qH80gVmc1mtt1u7eHhwTabjf3ud78Lu2JVVRV25kEub968CWQCMLEDEc7+8uVLu7q6so8fP9rHjx/t559/ttlsFlKC0D85zGxZy1I9bZuZvXz5MkSDAAPsGCJBpkRIqAd/QvcvDitdb9++tbIsQ9RoPp+H96f97ne/sx9++MHm83mILKOL5XIZHjx/+/ZtSCcAJOq6DikIkAokzWSMyQ72ohHy+/v7MFHBx66urkKO9Gw2CwRAqsr9/X2YOBDdK4rCJpOJvXv3LpDW3d2djcdju76+DlvAYpek07AKoGkJROrwBdKGWBngYXy2CWbChB+DZ2b7CScpT9iqvkcGO9MIINcTrdM0Kgr64TcAnMk5pIa/IXclGNJXwEoIh3rBQSb0mi6BD2KDtMHvGt3EP7vyZaXjyI4jO47sOLLjyMvgyHPlyTdcdIwlxabZb13p7xIpAIwuYZKTPRgMgmNnWWaLxSJcA2mQ3gAh4XQYA/ml/KZEROQJo93tdmF7TYB+vV6HO2mMGYUSxXn9+rX99ttvIb+VLUUhApaIIbymaQJp7nY7u76+tlevXlnTNDafz0N/9X0c+q6IqqpChAaQWCwW9vLlS/vpp5+srmt79+5d+M0/IEnure5ARfSHOquqCvKm/6SmECXbbv9/9t6s140sudpeyXlmcj7nSCqpym4DbTQM3xrwT7ONegH7T7z/xlffVRuw4X7bVa6uKpV0Rs5MzmSS3wX7CQbVbXXJpb4pZQKCjnTIHPaOiBW59orYG9M1EwAATILNcrk0wGKcGSeuw3z6LjYECFhBbANmJZ1Oq9lsKpVKqV6v63A46He/+51evHhhS+YA02g00vPnzzUYDBSGoQELwH08njTo9Xr9QqoThqG1ks3n8+p0OorjWJPJRIXCaXNFAMonQa1WS+Px2Gzt9vZWlUpFj4+PmkwmxqRhUxSx5nI5C9ip1GmjyHq9bvNAYGY8J5OJyuWyMXZxHFtgzmaz2mw26na7xrhyENAymYxtVooGG5/DH/gs7BF/40NIIYIgsOBO8e3NzY3u7u4sCZHODBMJFgGS9r/T6VSHw0Gz2cwkPyR5+F0YhhcAwbN4xouxlWRBkM/zTL6OhPHd7XYaDocmywLQeWZ8Ab9gg1EYUnTvqVTKWHs/hqxEkFASJyQZYOPzXDMIAi0WC5O4kGgSF7ElxtWzcDw/8+tjEd/nc5wDG+Fn5kyS+SiAlBwffiQYmWBkgpEJRiYY+Wlg5J9VUijJ3holWQEbb9LchH/rZEJ5u8S4YVTy+bzm87kFGRgF3y7TD5rXa/oHZ7D5u1Kp2Bs37Bg/AxS8mR+PR5MjjEYjpdPnzisY/Ww2M+0pS7gAgSRbnvbANR6P9fT0pGfPnlkQ4DnjOFar1TJ9qzc2AHS5XBqwcL10Oq3r62u9fv1a9Xpd9Xrd2JrxeGxzUS6XDfwKhYIFeMaAQME8NptNAyVAHH19JpNRvV63+/D7ePi5IHAQeGezmY0hQESQRBO82WwuJAQADcEDpoMAP5vNDARwxMVioaurKwussH6r1UqpVEqtVstkNTwPy/qwIYC/l7uwHI9dzedzYyVJPDKZc1tjlsMJNLVazVqvem01YweziX1LZ10y44mWm2SAIun5fK4wDJXL5TQajSTJgly9Xtd4PDZ7QbJQLBY1nU4VhqEFR8+qEsiKxaKy2VOXImofeEZJur6+VhRFxrLCgMPcITNBugCT2Gg0TBKFfSAVgV2rVCqKokiLxUL1et2KfIkL3C8gQGDlGSaTid3ner229rL41Xw+N+DE3ulgRLLL+fE9ZGK1Wk1xHOvx8dFswBdD+9joWTviHhII/BuGMAxDY+kJ+gAw88p3iYOAALYM6JPU+gOmzvu3Z949C4gP8r3k+PAjwcgEIxOMTDAywcifP0Z68uzd46O8cDGAGI0HZwI5N8zDEijRbPIZjPl4PNoSK+yHX0L27CBOxSQXCgX7DM4gyd6EPait1+sLrSmDztKwdG6py7/R1RP82FgunU6bQfGWzdsvy/Gwaen0Sf8O80PwhDnyy78813a7tSVprkOg5jOVSkXj8dgCcqvV0mq1svvBiHibx2h8UEKugaOiO8eBfd0Ajgd7Kp2ZAGQCh8NBi8VC5XLZlnhht2gjTOAhWUAT7JeTMXi+U61WTbKyXq+tC9Bms9F4PFa9XrcaAekUsGFnOR/sYqVSMcDCxgCHarVqbJB0lghxbqQ9z54902g0MmZuv9+r0WjY92CKASrmj/nAvgBEgrkkk0CwV8bq9w1BODc23+/3zVc8i42NIzeo1Wo2BswnjCkFuSQ4jD9z420+l8upWCxqNB
ppOp0qm80qiiKzN+aRuSP4zufzUwev3/uhlxrBlkqnpGYymRigwLLxfPv9/g+YO37mvHyWhAdfqtVqJjvBTgEmzkO88Cwzn4P5BhioveE+YNCwJy8jIukmhvEcPugz1iRR2Wz2YjXByyt4VuIUvoodS+dkyMdh4su798B1OS8Jgh/b5PjxR4KRCUYmGJlgZIKRP3+M/LOvcDEoMFkwZwycf7OG6fBBO5vN2nJpuVzWcDi09pPT6VTlcvnigaWztp3JIKBlMhm12209PDxcGCjsCRPD75gQb/C8KbNk6N+aYbzK5fLFbtwsffvlSNgAjIBWt6lU6iL41Wo1m/xyuWwFs5VK5aI7C0XA6XRajUZDh8O5qxUGM5lMLAijBQe8JVkw9V1seG6WaxkPGCIMGPYPVjKKIu33p13SmQ8KbfkuwYWgArABRgS1xWJhYMm8Mn60UmXJnWXgRqNh2u3ZbGbX4l6QBgBiOMl4PDapAbp79tNgrqMoUrlcNtvD+QEg7ocAO5lMDFA8O/f09KRarWaaaBjD2WxmwRvGkLFHB49fAXiNRsPsKggCKzomOMxmM0VRpF6vZ+MnnZfl8U0CZ7PZNIYHxpJgA7tO+99UKmUsLfbmtfjYS61WMzui+Jtzorkm8cHukBJQGA4wMmcE8VarZb4Js+RZKM9S8UwkdLDIzKN/gYDx47m5X4CBayDlItnyyRK2iQ35+wDICfz4CZIjmE5+9isb+DcrAB7gmHd82cspOLy+3Y859uVlKHyGccHvuQ7JaHJ8+JFgZIKRCUYmGJlg5M8fI9+3wpX6H3/zAQfMB0HTg4iXIKBB90zSfD6/YH1ggxiITObcptIzTv6tPQhO+yNMp1NjWWBxJNkbP4ONA3sma7Va2bIpg+kZQQYag1yv13r27JkFFO4RIyOoSmftLMv2FFxy4ISetfKsWrvdNr23X+71+mbuwzOOOArLz9vt1kCSzjMYMu1x0StjxB6QcBicKJs9FWIvFgvrwFMul634FyCgw1Ucn3TisIF0AyLIZTKnYnHOsd1urbtRNpu15/DsrXSWttB5yjvhfD63olTOy3foEITNYp/j8ViSjJVDYoNMAJve7XaaTqe2eeHXX39twJDL5QygkFV4QOB+9/u9de/h3zDQBEi/s/p2u1UYhqrX68YGe0ZmvV5btyEABz0/kodUKqVms6nnz58b8BCoSC4YI8+qw5pJMh02dlapVNRqtVQsFhWGoVqtlur1utky18Au+H+fyHh/5B6CIFCj0bCWupyLOAEYkhgSIDkPtQDFYlG1Ws0ShHq9bjZHcCdG4cskqcQC7ANpC3GHcfAvJJ6Z4/mYIwAFGyqXyxdJFPfB8xIfPZuOj+OTsJQ+bvAs+JcHRy/F8mweenbG08vH+G5yfPiRYGSCkQlGJhiZYOTPHyPfd3yUphkwMdLlGyIPirMSgDKZjGmJfcDhISl6RBfLg6RSKZXL5YtN/aRzYVu5XLaOOLAnsCK0b53NZgYo3D9vpn/s7TmOY+tglE6fusGgM8eAYFAIhn7ZExYLUMQw9vu9SSx4ZgwIg/Ng6o0nDMOL5f44Pml4YX0YV9/WliLtdDptBdg4A0EijmNjTcfjsXK5nBV78jueD503OnhYMZyCfb88yGLUzOV6vVaxWDQWFF0vDgtANZvNCwcksB0OB9MXY1dBEBggE0QAvPl8bppfWEjGFbkKbJlPWPg3SQGBB2lPoVDQZ599prdv35qUBvumVsF3DEK/jlwAVoikxgcpH6z8c/qEBBv3GwNWq1WzH2okPGu8Xq/1u9/9zsAadhvfI7C9G1jS6bTtj4GtFYtFNRoNS0rCMFSlUtF0Or1gYX2Cw1xxDS/Jwd6ps6ATGb7KGHnmkDGhhoVAzT0CkofDaXNOxh4mmSJj6gVgwGBOfcvtIAiM1cUujseTfKZarf7BObBp/s+/tBAjuTeCN37Gs/F7xgFmjXMBJNg08YrvwjwT3/xqBckyAOSB3rOknh1Njh9/JBiZYGSCkQlGJhj5aWDk+46PIilkUgnADIBn3PgMn/OD65k1lhZ5oyYoMyB+aZlgHsfn7kvb7XmHdq5FtyLewgnYsCIMPgHTX5tz47xM0HK51MPDg3q93gVY4hyFQsGCIvcAI0UAXywW1uXncDi15oyi6ALMstmsXZ/AQNtSHIixRItOkOEcjCkO47vxrNdrdTodYxuRdDC+GHcmk9HV7zcZZg4IjjB+sK4UUHNfMC2MN22LG42GwjC0+ywUCrZkDsD75V8KVwFvv3xNEEV3TJCo1+uqVComVwCEfSKTSqUMXPr9vtmXTzjYa4QgyPK27xhF16x2u61vv/3WQECSsaewV1zDs68EwP1+f9HVC438eDy2Ll8AjWfYYKVIgLytE9BTqdMGjqnUSV5xc3Ojer2u6XRqTCCBcb/fW4D3HaM8eAEu+/3e2jH3ej1LgnhWWF/aFdM6l7kiUMM4cc4wDFWr1azAmfjCmNGBikDK7/Fr6iEIiiRS2+3W/IfjXdkTsi8kNcwPLDX245MNmGbOxUoBIIRO3dsrMcUnzf6ZeG7/t3/x4WcSS+adsSAWwHpzLz7p90BxOBwuQMcn1skK1//uSDAywcgEIxOMTDDy54+R7zs+iqSQm/MA4t+sYVr4f/87bp6bhi1hMP0SI+fACPwE+EFhif/d6/I9WAR+hhFh0pncarWqVCplQQQ5wW63s6XgbDZrTohhUazLvfFMdD1iuRtw5Pf7/WkPCZ4nn8/bcj4gwDI7zkSggNGkixJBG+cE2DA6DJl7QKoB+NVqNdt88OnpyX7f7/dtXGEbJdm+HTj4fr83FoMAwrzWajVb6vdspXTSXjM23qkmk4mm0+mFxINrxPG5vSdzwrPASHA9AgWOjwQD5yZhgP1C3+4TJja+5N73+9OGjvhTmNsAACAASURBVCQEkowN4/ez2UzH41m77IGG5IGf+RzJB2PqGbXtdmsyFtjSbrdrn/HJDe1xCYq0qW02m8aMUROBTxCg2JMFJpw/BGFAdDQaWZHvdrtVFEVWME1y4pfrU6mTFIHxRz5TqVQs2eAzkqzzF+f3cYdAh9yE//NMFj5FEgnzS72E36eEJMLLHOI4NruN49gkOAAbc8m88v+cG9/Ab4lf+BtzhQ4d/+I7BHR+9lIx/iZW+rjM83OPjBl/sDn+jV+QXDNmnqVMjg8/EoxMMDLByAQjE4z8+WPk+46PssLFTfmlPAaCIOFZGAI8AZZBknTRoQhw4u3eDxLsCg/MGzWDBZhw3SiKzFCZLO6NJWLewn1w47yAjn9DZ6mcCZFk90xAOxwOtg8BBZwsTxM4PIPJs3ANZCQYng+cBCACyLv7HTBWAAa68t1uZ4GFQNJoNLRarayjE0HwcDiY5IAuOz7Io5uHnfFADFvhx5nAzv8R4JFAFAoFawWLZCGbzWoymWg+n6ter6tcLmu5XFrxryQNBgNjbH2nHkBHkrULppUvLCfyHc4lyZbkkQMQpCSZLIFC2SAIrHNVsVhUsVi05MQ/I7IYdOvMGcEllUpZIN1utxdJznK5VKVSseSCu
fLzyR4sBINs9lQUvVqtVK1WNZlMtN1udXV1ZZ2YlsulZrOZ2Qgbf2I3sH4k2z758gwkQIQvLZdLNRoNpdPn7mzURpDseLDw7BA/+5a7Xo4Ew+vlCtwDY5jJZNTv9y/YL5h+78vYAckWYEA84BkZD3wde6WWA7AkXsH2+8DNuWDSABrPlHFv+A02B7uL3/tEmevyOWIQ9/wuE+5ZQh+T+T4AAvNPcuEBKDk+7EgwMsHIBCMTjEww8uePke87fvILl3979EyaH3h+B8jw1uuX7oLgrMn1elYcEkPgWjyoZ5BgY9Bro72lgE+Stc6UZEvVBBOuTRBEs8yb/Ww2u+imgmPzVkwAx7h5Y2dZ3S9f4gxopWEOfatPgjNv1el02t7uCT6MBX+urq4uAjHGhXH6oAnLls1mDSQwKs9ywGANBgNji2CQvOYYJ4LxQMMbRZFyuZzdGxptNnH0bAXngr2EKaETF915ADpADB09tQTL5dKeIZ1Om3yFuZLOnYn8viksl6NZRktNAGR+YV+wFcbRM2eMBTZAQJRkbA2Axve4FgDI87KTPEEBe4d1ZG5arZbCMLT5lE7gR8F5On3eY4Rn8npxfua7u93O2E3G2gdi5hHbWq/XNm+LxULSua6CczHWsLfYnGfPqUMh6FII7wMyCRnMNzYD24if+/hBfPLzgH8QXJknfMuzq+/W4nAvnq3jZ28fxDa+i794f/f3wHORlDMOgD5xgRjjf8Y2GHvm3AOQvxfiM78jTvnE1gNScnzYkWBkgpEJRiYYmWDkp4GR73vp+smSQi7CQ0uyn3kI/9bo32IJiBRwHg4H26Xcd6DxQOIDtmeGgiCwoleW2+lUBJMGm4bEoVqtWgErS7sEIpYxpXOxHZvM+Tdtrsn/4YQYAOwXziTJlrkZF7rXlEolKyAluHptLYyG/y4BC+ANgsB02/wehiOfzxvbEASBBV+KqP0yvZ9PHBXgY25gLQAR7glGB/20149TNAm7QZclAjHPk8lkVKlU7FrValW9Xs+cqVqtGrMZx7HCMLSxZ28RwAxH495xCsbFd99Jpc6tVwm4ki66VO12p85LnhmESYOZyWQyptNPpVIXBaT8fTweLZhJJ0Bhk0hsn8/D6vnrxnGsZrNp2vHD4WBF4Ox9Uq/XbRd6xp55aTaburq6uujM5MedPWeCILANNmGAYYi4b8ZuOBwqlTp1eGJ++JtxhHGHBUPOwfWr1aqazab2+721aCbWAHQE13Q6fSG3IAAyZsw1gRZp0rtgiI9zHZ90UBPi45dPOgBlQAP7LRaLF9f1HbbwQeILz4K9M64+WfdSs8PhYPGT8WScvdzMPytjzPl9EoovMHbvvgB42VpyfNiRYGSCkQlGJhiZYOSngZHvxYI/gRV/8vATIMke3A8GD+B/5mEIsD7Is1QJe8QDYkg4KJPKdQnmLNXX63VdX1+rUChcBH2uVyqV1Gq1lM/njd1ht26vNw2CQNPp1AaZt3ucAUPy7EOxWLTORYAADkUglaQwDO28LItjAEwwRuqZBL8MzjNVKhUNBgPbJBI2gTEsFArqdrsG5HwP7TKGCxMjnWQDMEHszTEajfT4+Kgoiiww4SwY9n5/2gvk7du3iqJIz549M0YSltWPG2DPkrVnUtLptLrdroIg0Gq1Ui6XswDIvGUyGZtj7MgHasZROrMVBCfugcAAwHrmlvHCUf3yvnc+EgzuBxtnw0PGHHYVAMfZGR9JxoJJsvPtdrsL9pNnYMNSispp6RrHpwJlzuHnhjmHIcS/YMBqtZpdg9bBSFpyuZzq9bolSlwLKQjjQoADOPEB33ENMCGxoFMbgAhbGsexBWgfM7x0hXnx7Ge1WjVGlWQslUpZLQJxiaCO32PLgBJ1Hp7twr58QTOHT16obcEmve3hP+8mowASia4HfQ+wMPp0rcOGYNm5V58cA0TYIcmJB0xJJiHjnvzzJcePOxKMTDAywcgEIxOM/DQw8n2k5EdpC08QYiJZTvUshXTuohQEZ80phsMg8kDvLnXS+hYjw2D4Huf1S4wMGsvpb9++VbPZvAjuFB4yWN4BCNbz+dwkCvl83tpl+p3QpXPHo2z2tOFgEJx2Q18ul2bEmUzmolAV5mU8Hpthvbt8GwSBgRa6cgIb48850+m0Hh8fdTwe1Wg0VCgUbMkUtszv98EbO9p9ZAYUXtIidT6fazQaKQiCi31hVquVaWwJRAQsgnS73bYAUK1WtdlsNBwO1W63jTk8HE6tbWHH0FZzj8ggvIyExIN7xOYIpt7ZOQByxpO5hrU7Ho/Gfi6XSxWLRc1mMzsX7BhFwhTsInNgeTkITnvDEPx3u52NAYwg7DLgDYNTKpW0Wq1Uq9VMxrPZbCzxWCwWxlQ9PDyY3TFvs9nMWLB+v29MmA9iyBFInEiCYAZ5Dh9cYQi5H8aJwESQ8+dg004vPwAwOB9BfDqdWqvcfD6vFy9eaLvdXtSsbLdbYyCxf38P2B1MGf60XC4tLgE8JGbMu2fJABgSxv1+b/vL4Hven71MBBv3NQbIInxHKII8DCbzQJctEh7P5uHvJKmAiSRLwBkLZDEkQvzh+YiTHpw8E4tdE5P9akly/PgjwcgEIxOMTDAywchPAyPfd3yUphkMLoPPzfIG6JdV+R1ggFFxozwgb+rL5dKWer0TYlgEUIIiQYriUwwMx+cNnkkMgsAKiVkeLxaLWiwWtgzLWzmAws/7/d66FLVaLZsQCogXi4VNAgwT0gGKDtHwRlFk98r3YQnZP8SzRAAgy9f5fF739/fqdrvqdDrGzDDuLH0DJjgQzATGjiMWCgXNZjNJJ1bw6enJpCUAe71elyQrfHyX8YDNaTabdp8Ez0qloslkouPxqGfPnhmb6bXegAdjzrXZvNPbFOdl3EhgYFu4R8adOUGbDzjh5JlMRm/fvlWn01EYhgY6JA5+6ZlAScckEpjZbGbBxxfuUtBMYOczFM5yPp57vV6bffogxsac1WrVkpXdbqf5fK7b21u9fPnSggI2QfEvCcZkMrGky2v6YbfxTR/IpHMROm2W1+u1qtWqHh4ejO2r1+sWpPkO98O/5/O5pBPLRhIBEHlf4t+woQBjKnUuMGbe2A8kjmM7PwGcJCCKIrN5fA8wAUCILX4PElpEPz09qVQqaT6fq1gsWutefLXf72swGFiCBADjw8QAgjfzwc/vsrwk6tgFzDfzQowjrnqWmWviV4wF8VGSzRFxmITo3ZWY5PjfHQlGJhiZYGSCkQlGftoY+VHawvu3fAaHhwBoCPYYBW+9GIK/UQIEG8exVOrPi1F5Fgvne3x8VD6fN33rfD5XrVbT1dWVfZeATEcfjJMl8jAMbX8DDBOAyuVOnWZubm7UbDYtkLJ07CcF9owlaAKLX7Ld7/e2FJrP522HdJ4dGcF+v78IBrAFBHsMkKVugiX3BQgsFgszqCiKLLCiEz8cDlawipHPZjO1222l02n1ej0VCgVdXV3p1atXxt7567VaLVWrVQvEBFnYzHQ6feF0GDS2Ip2Z3PF4bCBPoIUdOR6Pevnype0ez3mWy6XVE/gl
Zt9KlXk9Hs8SAHTOSBA8g4KEJAhOWn1sF5BFZoH8A0YReYv3lyAIrCMUwRGb9kCHrcIMsiw/Ho8vNiYkqEqyPVWQA0RRpGKxqMFgYAGbzwOAm83GpAUE9Nlspu12a8zk4+OjptOpHh4ebH4Gg4Hu7+9Nm4/WG9CgKN/LCLyEhZoMAiPdv94FY34Xx7EVOCODQNpEkMafCoWCtdRtt9v2XWyE+WPsCfDlctl2tmdO+RmfhO1ljnq9niqVinq9nsUuQIEEiWSbg6SZ5yMhhx33qwqAJbZI7Dgej1bvQWLMHOAns9nMGH6ux7h5bby3JR/PPfPK/yfHhx0JRiYYmWBkgpEJRv78MfJ9L10fpUshLNB+f96MjuVWmAEmkDdGlmZ5UybQwJDQtQeD5I0VQ2SAuRaMBkupLDNjaIAcbBtvufv9ufg4k8nYzt2Ax2QysUllZ/lsNmtFvJPJRNVq1Sbs3f1FKKT0S9TpdNq6MgGAvJnDLAJskuz5s9msxuOxPQMtUDGUUqlkAZJAAUNEgOKZ8/m8nZv9Epiv6+trk1DgFC9evFClUtFvf/tb1et1kwK8W/gJ43k4HNRoNCzw4eTL5dLaoqbTaT179szkIjAOuVxO2+3W9N2VSsW0urPZTI1Gw5hFnp0lYQL1fr+3TRYBFD+WPtmhuJj5ms1mFhhhteI4VqPRMPkD48o5cTa/9Mz400YWrTGsUzqdNl05wQPAINh5CcrhcFC329VsNlMURWZngIF0TuyKxaJGo5EB2mQyMb2335keTT/L/PzNfKRSKWPQmOcoijSdTvXy5UutVitjwQ6H0w71JD4EWp8AYvc8H4GUJAQbBhBgpguFyw0/+Qzf8/UYvigXUOO8JDXEFeyWeWBFgIQxiiJjNbPZrMUHxni32+np6UkPDw/G4NF+mcQJSRS2QYIGyLI6IcnqI0gestms1U2QUCPfACwKhcLF5paeaeVaJLyeNWR14Hg8dwLDBolZfI6DWJgcP/5IMDLByAQjE4zEDxOM/Hlj5PuOjyIpRGZAwOYNzy+NcuO8QfJwBACYBxyLB4QNWSwWNuEURPLgOCDBmOXQYrGoh4cHlUolaxnKxBHI4/hUhMnSICwczGG1WrXl0uvrawssm81GT09PiqJIvV5P+XzeHDOTOXd1wXgAqu12q6enJ4VhqGw2q+vra2PeYIIwVMZlNBrZOTC05XKpZrNpYxSGoQaDgQE7zueXS3k7Z/md52XTxuvra2sdG4ahisWiXr9+rVwup5ubG41GIyu2huGEPfTMHY4SBIHJIqTzLuUEGZiEMAyNraFImxaujUbDwJAl+d1up0ajofF4rHa7bf9PO+N2u21tf4/Ho0kUcC4YKYIBwSedTqter1sCxDxvt6dNCmFECbKwkARKL3VAr8wzI29oNpsXCRGyBGwcx0VKUCgUTBeNXwDU2Bjyl+12q8FgYDKdzWZjLCoSgvl8fsHi5PN5C9K73c7OiZ4b2yGYZTIZhWGodPrc1hdwBEzxJ0ADGyRQwiJ7wEE3nk6n1Wq1dHt7qx9++EGvXr0yKQU6/3z+tN9Np9PRX/7lXxq4Pj092XP5cUT/TbEy15dkhdrYp68focU1Y0kiMBqNLCFLpVJqNBoajUamuweQ2XAUJpxkjsJqD4wkECTTBHIK/7lfQAY/Y28bYt98PjcbBKxI7rgHZFuAG/GGn4nlzLefQ88+JsePPxKMTDAywcgEIxOM/PljJBLNP3akv/zyy58EJP/8z//8JZ2G/CR6/aNfFvRvlDByXn/qAztvmbBdnBtNtQ9IJAMMUC6XU6VSsW44o9HIgh5sAcvavp2pJPu/3W6n6+trPTw8mK67WCyq2WyaY5bLZTUaDVtip5MQQSuKIpMv7Pd7W+I+Hk87w6fTZ219Pp9XpVJRu922pVi08xgDAZBroLcFQFnSRTvO79BzB0FgAW69XlvR62KxMBaV8w6HQ63Xa61WKzUaDdMy4zTIT3zLYvS5MAV0tCqXy8b+EBCQlBDokXVst1vbNA+nJYAtFgsrRpXO3WRev36tVCqlyWRiQXw4HFpwqNfrqlQqFvzv7+/NXgqFgjkKRbcvX75UEJwKbgF2NpH0hcqeMeZnWBs0w7BfPC/L2rDCnskBMKTTfhawje12W/l8Xo+Pj7ZUf319bQkBUhyvq4ZN8nIPQBTGEr0/40DBNEwb/sR38R/85O7uTu12W4VCQff390qlTkXJBDVYSQI1YOUZR0AVvyBZ2Gw2enx8vAia7AeTTqf16tUrYyZhfJFXIEPabDaq1+vW3po58+wff1PgDJMmycYIVjQIArveer22REiSyWk880v8QzPuX1zwfX5mNQLJDHKacrmsZrP5B/vEcD8kQshgqFVAdsGKAEwvz0IcxkZI/D0DLulCjpJKpRRF0f2XX375f38ScHxCR4KRCUYmGJlgZIKRnwZGrlYr/eM//uP/+WNY8FG6FHIh6VyA6QO716VjQL6zEcuidCHK5XK2RAqLwFvx4XAwNo0lfhzKazi9Fhy9KcYxHo//KNNCsSda8bdv36rdbqvX6+m7777TaDRSq9WyYALbFUWRDoeDJpOJyQcoviRQADS/+c1vbD8RQK7RaOhwOGg+n+vZs2cX58KpCZCpVMpa6fJWPZ1ODTxxpOl0aoDDmGB0MFpIBcrlsl6+fGmBbLFY6L/+678umKpsNqvhcGiAyvwhhWFJ2LOgMIitVssSC87FvUnSbDYz50Qnz7I2TAJL8jhAv983cCqVSrq5ubHAvV6vVSwWjREhwERRZMvvy+VS8/ncbPV4PBoLQ+tQOhS12227D5hA5gWAYjx4fsAhDEMLIARngMkzKX5JGufHbrhetVrV4XCqA7i9vdWbN2+sILdYLGo+n5tkgQQCUGMuAYsoivSLX/xCr1+/tsRtNpupUqlYgPVjUygUrJNTHMcmp4AZv76+Vq/X05s3by4SQRjw+/t7k0d5mRNzQlcwpAHD4dAKyUul0sXqSqlUUqfTURAE+td//VcVCgW12227VxIofMQztdjpdrs14CfZZQ4I5iStzDnP02q1FASBxuOxnp6eDOixYVpBI2GiVTTSF+IWwZo55/m8pIoEMwxDY/2YVxLyVOpcw4OP0CkMoAa0SGDelYl5lpBzIa0gtn+IdCI5zkeCkQlGMu8JRiYYmWDkzxsj36cC+SgrXJ4l4CbefRskCAAq/B6GhmAPM8X5GBAGie+zbIzkAvaHINbr9Qy4CC4sUTKAHrBSqZQtvUvnTfzQ9XI/LK0Ph0NVKhULaGjWMVRJts8DXZfy+byxEM1m01ijfD5vS9xhGNrzsaSJA3OgneVzjBHGl06fim13u51ubm7sjZ/7Z1z4/n6/183NjT7//HPTJqOBl07sGswB1240GhdMAGPAHGKIOKV3cjZgpCZhOp0qjmMr+KQjDsEaTTa1D/yOIAn7iNaXYCXJpDQ8J+OFU/uf+eOX+SnOZg5xYjrreD35dDo1h2VO6OSFzEaS2Q3/pjMWDBLL8AA3Rds4Mj7GUj6MEmDQbreVSp3qAjKZjL777jtLkKIoMoYS9gi
2LI5jY009WGYypw0uKe6u1WrGppfLZbVaLUskdrudyuWy6vW6sW1ILZhz9sB5N7nBB8rl8kVihwyGxIfvfPXVVwqCQGEYWnAkCel0OsaIZzIZPT4+GiuZy+WsUxT2NB6PdTgcDLg5D6w0bNzhcLB9T9brtTGUYRjq/v7exoWgjxQCQMTG8RN8Ex27f6lBwuBlHJPJ5AJsqDPArj0Iwohms1m7X+IAgEkMelfCQdJHvOWZfp9sJitcH3AkGJlgZIKRCUYmGPlpYORyudQ//dM//XlWuDhYumMwYHBYpvMgwL9Zeo/j847rOBJv2wwEb/YUIMJuYFiwWwwyeuZU6rSzd7lctolmeROWje+gO2WJ0gd2llqZ9F/96lemYUdDvNlsbGnVL11yP9K5kHk2mxmLh+a8UqloOBzq+vra3vQlGRNEsIEJlE5L6v1+3/YIgUEgwKHNLZfL9lbOcioBDjZrNBqZjl6SdQ3CITAqxgUApyCW5WAcL45PbYuHw6G1BKaoEkaDsWWpHGYPNmM0GknSRTJSLpd1f39vz5vJZEzu0m63NZ/PrZ0u7NF2u7Vl7larpfl8bkW1rVbrQr5Sr9dtuX6xWKjf7yuOY9ukbz6fK5vNqlqtajgcGnjDfEpSrVbTfD43XXOj0VAul9Pj46OkU0IAg0sHqlarZYEX9utwOBhLMxwObaxIHLzU5OnpyZhbJBbD4VDFYlGTyUR/8Rd/ofl8rn6/r+vraw2HQ7Or6XRq3Zey2ax1TgqC82aq+Gs2mzVWCgYRGcLnn3+u29tb9Xo9mz+kJF6e42VSBGiug2xjNpuZVIAWtUhmfvjhB223W7169crsE7kEcoX5fK67uzvT3ufzeUVRZLUqSB0obMcvvKSBwI4MJIoi83XizmKx0GAwUKPR0GKxuJAr+OJnwBRQwv7ZZ4eECBujVkSSJbXU5gRBYKsckux+SGiZL+RZ+BAJYqlUssJiEhLmBIkLySLXY2yS4393JBiZYGSCkQlGJhj588ZIyK8/igEfY4WLwlC/VM6yqNdFM1EEMN4S/ZKeZ1UAFB4yjmN7eK//9JNDcGAg/AZ3BBaCLVpsnBWNNsFhuVxaS07ebll+3u12+vzzz401I4iypM1n5/O5bUiI3ON4PJoWe7PZqFarabfbaTQaqVarGXNIpyLGiSVZJAW73Wl39sPhYMEUBgpmAq2wdwoAgfEFvLPZrG3CR0DL5/PqdDoWPNiskmtgsBxor3O5nLUvJejB7ADCsKgwYp6Ngr3M5XKKosi+D8tFcM1ms2o2mwaijUZDYRhqvV7bc7daLVUqFQuEtVpNkqzgdjqd6urqypKXarVqkg0Ay2uHYaoAPQIhPtDpdDQajey5YVm4LswcGvHpdKr9fq9KpWIgR+vdw+FU/Alo4x+M2fX19YU2netJskSLACudQG4wGCiKIrVaLS2XS6s7APToZhTHsUk5sLnZbHaxYebj46OGw6Fevnx5IQcplUp6fHw0TTrJVbFYvGD7AOaXL1+q2+1qOBwaY7rdbnVzc6PJZGLP9PbtWwuqjUbDWDI2WJVkDPtut7OEhucgmHrwgH2D8YdZJB5lMhkDQzq7kXRIJ5aNxBVZTjabtSBOzEFGAqPmVxdgYwEymEbqZKbTqcbjscU9AJnYyXOn06d9Y0jwPKuMbxKDPHvpZW/EHC9587Kj6XSarHB9wJFgZIKRCUYmGJlg5KeBkdPp9M9bw+UHAFDmJgiETIj/HkbKQYDK5/OazWYW4DzQ+M/hwLBQMBm73c4MaLfbmZMSKGEQPRMBwzafz+13pVLJmEVYjXQ6bYWKkvTixQt1u13VajVFUaSHhwftdjuFYWiMEAH75uZGNzc3evv2rckqgiCw9q1cYzKZXOiT0c963SwBGVYCY0ilUnZtus7wZg+gE7TZP2U8HqvT6ZgTA2KHw0GtVkvtdlv9fl/dbtf09pyL9p/IEcIwNFaKhIAxJ+h5kO73+8Z8HQ4H0yMDwOl02uYSaQpJC4zaarUyloiWuoXCaY+PfD6vXq+n2WxmNQjL5dK6VW23W2M4aHHKNQh42WzW2EvmA3BqNBrK5/PGnJFM+GJK5pEi8t1up5cvX9qyfKVSUb/fVy6XU7fb1d3dndkiwBxFkdk+MiE05xS77nY7ey4SFJbcveTjl7/8pb7//ntlMhlrzcoczOdzY5aRA5DAeA0zevZms2lJGq1wqQHh2bE9pBeSzCZqtZry+bz5Z6/XM3vqdDqqVqt6fHy0zk0AHMFeOreVZt7oYrbf79Xtdm2cASAv28JvsL1cLndRO0Fck2SaeJKnxWJhdSFPT08qFAq6vr421pxkya8mSDKZinRucUucPB6PFzKN3W5nshNf4+ClaMRF33UslUpZkkPySv0CiQEgwfPzTNgLTCkgtd/vreg5OX78kWBkgpEJRiYYmWDkp4GRXtr87vGTN1WBycHA0QzDqAEOvL2n02lj43j75C0yl8sZUNTrdXv75GG8Fp1zERhhUtCAr9drC1j8fxiGJkVApw7gefYPEGMJGMDjfsrlsnVJGg6HWiwWtmTMjuzcB4aZyZzatyIHYKwAmuVyqVarZdeRZPp53up5Tt9JCADCaClW7Pf7ms1mxoARfDkmk4kmk4ktZVMoi74dVqRarVqQZOxIBJhDHAfZB+OJJAA9M/9mmZnuUSytMwc+mEsyrSxjQ2calps9cxxFkTk/OnrYDpjR6XSqKIpsPuv1uvr9vjKZjHWVQjPMH+yTwEUSQqvTZrNpQAUjhaMi31gsFhZc3759q9FoZAWprVbLGCqYXaQSBPlqtaparaZU6lSUmkqldHt7q9lsZq1bYTYBFjYBhUUjKev1ehd1H9JJCjMcDg3wAaxMJqPpdKogCHR9fW0+iL8cDgeNx2NNp1NLcmCAmH8Ca6VSseckaZBkTBWsFLr0w+Gg6+trY9qRsVSrVUkyuRPxBT+J49jAj32Ams2mxRFaaWezWXuWOI7NZ5BSIY96lyHHfpFHYF/D4VClUsliC3GEInrPmPFMu92p61ytVlMYhhYzCPD4Nv8HQGy3W81mM41GIwVBYJ3hiClslsk1fUE340usTqVSxrbzvMRWxpV4kxwfdiQYmWBkgpEJRiYY+Wlg5PskhR9lF0uCOowdxoGx8WYKkDDQ/O13nMZ5YOb8hDMwvFmiy2XpX5ItgXrAYJkegPfW7gAAIABJREFUw6Arjm9zy3Igb9oYJW/rLMkSfOv1usrlsvb7vUkWwjA0J9/v96b3haVcLpe6v7837TIyhE6nY8vhtOjFAAASgiyaVIoHYbhYRpdkjCHyFKQh0vntHE0wS9jIL3yQhRVAEw4jx9jDCvCMu93ONMiMP2NI/QHSABwxiiLNZjO7J6QAMIQsPz89PZkTYjPMNyweAXg2m1n3K8CQOfYbRbIUzvh64MY2SQCkc9ca5p3xnk6nmk6nNvfokpH6EBS5ZyQiu91O4/HYujFR5A2DFMex6aklWWco5D+tVssYR9hFwJIia8DS7wkDa+f9MJVKGYtHUkTAwueQKwEe2WxW3W7XdnGvVqt68+aNis
WiyS5Wq5X5DYwb40aSud+fWkGPRiPNZjNtt1s1m01j/ACmzWZj4IOtSecCaQ8myEy8xCYMQ4VhqFwuZ7IIur/1+31tNpuL5BgWEHAj+Ws2mzanJDpovzOZjAG4TxZhYZEr4B/UG2DXfjWEYl0COKCH7ZGU0aAA5pSEb7PZWCcoWMg4js3XiasAB2O52WzsM8RjfN6vwCTHjz8SjEwwMsHIBCMTjPz5Y+Sf/YULA8VpCXI8GEECpsw/AJMO2NABZrlcWuADjHjT5G3TM2G8rRKAKADl/5AlfPvttxbkmRRkCAwkg8cEAiQUKLJ8WSye9huhew3G6JfIU6mUtbTNZrN6eHiw1q7pdFrdblftdtvYL54JzTuOQuCDzYTphNmEgYO9yufzxnD4OfBMFKDB2/p0OtXt7a0Fikqlov3+vA8D88V48Yw4MveAwSMPYH5Y+iUYPDw8aLlc6u7uzgKwvzcKLzFqnJD7ZkmYe9rtdhb8YXQAK5aRr66ujEXxoCjJAI+/e72eqtWq6vW6XZekZbVamQa40+mYpAG9OvYDc5jNnro31et11Wo1GzdYSrrrEADYWwQWFRZ5Pp9ruVzqq6++0uFw0NXVlclCfLBnbmjFm81mjXXGRznf559/bteN45OeH+01DI+fZ/yV5AMb3e/31m4XFp/uXSy94wf8jgQF/wX8sS/mnDFbLBYW5AEOxtnLqQBJ7IBOWNI5AZlMJnrz5o3Vq6zXa9OtU1xMMkmBNKsCyDIqlYo1EiBpggmEccPWiDsAmLcpalLwEc/Ke6Y0CAKTOjDPrFAg7WL/GnyXOEIzBUkGLIyXT/6wOeIGtkximBwffiQYmWBkgpEJRiYY+fPHyPcdH0VSGASBter0bAgBkyU+3iLt4qlz4TD6XbqCwKayfMobP8YFW8SgMhhMuH/jxLiy2dM+GezJwLImWm4YK4ycweReuXcYGkkXjBb3VCwWbTxo0RkEgRVcokePoshap/JZxgQduQ+gABjXYnmZ+/RGTpEiAdqzDBiLZ6no5LRerzWbzbTf71Wv123JlAAOGwNwIrHA8TBU2AXYTYI+MgO0zEhFcCSCIAydB7PD4dStZzQaGfCyNA6z6dv0Hg4HK47M5/NWTIl0AOegrqHf79tGkAAyn0XCMJlMTG4BAwLw0iK50+noxYsXiuPYpDuHw0G9Xk+lUsnGjvFgvGFiANJyuWzBqFQqWSIQx7FGo5G++uore3YYrna7rSAIDHQIlKVSSff395JkjO90OrXNF7F39nuBsfKSChg67AkQR3YTRZHq9brtteKDYaFw2qMEtgrfRi6A7QKcJFz7/d602fg7zwxQ+ETVt6GmVgEpA0w+AN5ut41tg20EZBln7hMfKRaL1u2J56Ad77v+ToLISgF/8ztiD/cryfTgdGDCJwAUGh1gczDLJK20/8U2YfWpxUH+wM/+WQESkhwSa+Ku9KcBJTn+8EgwMsHIBCMTjEww8tPAyPcd733hCoLg74Ig6P2pk8DA1et1m2wOf/OwMr8/9wV4wMbwdspDEsgYOP+Gie6cSZVkoIQz0F2GN1CWbQlYnr1hmdIzi9wfb+r+bZp7JWAweTCNLDN7pyEg0vkmDEP1+30rasaBcW7AEaaCoj8YKgIorF+xeNrPA7aBP7AH9XrdGCpJVhw4nU6NKcvlcuac0in4bDYbK1CF3UEewOcIRAA4P2OUhcJpP5fNZqPRaKQ4jo3dCcPQtOT7/f5Cz0uywf0RQFim5zOMD8yDZxe32611DmLDS0k2tr4jFYmIT1h8IIO1rdVqtnyPvKBcLuvp6cmcNwgCa2mLAyNfgJEhQSDAE3iZX9jcRqNhGnU2sWTZn0QEfTg6dkm6uroyYN9sNsaKAw6vX7+2ccK/KAgnMEkyH4VpQmteLpcteSJQMt/4GzawWCysOJs4wd4ldDjKZrMGuFEUWZJI7YcPjNgG9w5bFQSBgTBBWDrt+0NXLfyiUqmo0WhYhyyYUuYMYGQ8OfdkMrFCZnyK+yGOlEol676FXwFGrFLEcWxjRzLJH2yEgmZAlLhIIsyc+USFuMb4EdfwB0DKAwXfAxA9Mw/gJMf5SDAywUgpwcgEIxOMTDDyT2PkewX5x+Px/3vf7zkY5GKxaE7lgzVBhUH0kwJTgN4U3SdacknGuODoMHE8MEbEIGCMLAMul0sz8iA47X7NcjAD5CfIg4iki6XI/X5vzA/FhhgQzoVEwE8KLCYFusfjUbPZTFEU6fXr12bYfsL9RPM7jkKhYJ1bgiCwXbaXy6UBDsHI/xsNNiwGy9+whLBxMHzcZ6VS0fF4NC07+4xQ1OqlJ+l0WvP53DZ+ZNmea/rxQqPMEjtBDycgiGPQ/X7fkg1aG8MWY3OwFYVCwZbW+/2+BQDGgOsw9iQdsE0kIPwO1sQzuZwHoGM/kyiKbDNCukRFUaROp3PRFQtQhJ3ygPP09GSgwE7ubPAIGNIR6ff+aiw5QbJUKimfz+t3v/ud5vO5adrxwW63aywf34UJJUFB+uKZHmRKaNozmYx1o0qnT3voAEbYGQkfMgKfuLAnDP4PS7bb7WyD0eVyqWazqc1mY7ZN/IEJZZNQZCqANIwfHacYuyAILMnwNoxcgqSMQnzkELR/zWQyJqHy8QrpF7bD+ZknAIKY4JMKYhp2zgoJgR4b90BD4gSYAPqSLmRr0uUKCM/D7/wc+8J0fDQ5Lo8EIxOMTDAywcgEIxOM/DEY+VEkhQwAwUI6d3UBRGCtGFAGjLdWQIa3bwYCY+HNlTdW3pC9htUvm0qywlQCNODFuVnKxFkYTK53PJ5acvrBZjmdQC7J9jdg6ZPlXcaAfU54S2dZM45jffPNN6YVJfACUjgSn+dZAErGoFQqGZMQx+f9Q9AQ06VlMpnYfWGIx+NRzWZT5XLZ5oC2v7vdzsaQ74xGIwM23x4Wg8P5FouFgQw/o5OG5SwUChYIuUan01Gz2dRqtbI54Br5/GkHd6Qc9Xrd7gsWEpDHYaMo0nA4tPqCWq2mZrNpvyfokkjQDQo2BjYYFtJLP/iO19xLsk0r6RJVr9dtrAE6NOiSTKbDcvu7xZlep04iBWhPp1PbM4QkIZc7dWCiSPTp6cmedzQaWXEr991oNKyQni5V+AfsLMlNFEVaLBYmyyAZWS6XxqABwsgMSGJg62GaCN7IFGD4YeVpe0288LIgGFv8lzkjEcCP0a03Gg1J0tPTk7HdjCtjyr3A7kmyuEWM8R3VkN+gXyfu8RmA3DOg3Bcxxce+/X5vwIy98EycF2CBUcafGBv+xj75QwJEzGSOuA9Ahet69s9/Nzk+/EgwMsHIBCMTjEwwMsHIj9JyioEnmPklbAI1IMGN8Tb/7j4GGFY+nzdnZtkT52RwPdPFxGCgfunRXzOXO+3tsVgsFIahMUSeBcRpYQLH47E95+Fw6vDT6/UUx7G1mIUFpBMPh++aw9s4z5ROpy1grtdrTadTffbZZ7aUWyqV1G63tdvtDHT7/b4VN8IQZLNZvX37Vjc3NwaA7B6Oht0boCQDPc8IwnZ6nfd8Ptdnn31my7s41Gg0UhiGF
86LRhoHgjVFhoAMgiDJ3LFEz7i8y5QxL1wbJ97tdgYEcRyrWq3aErSffzobrVariw0bKbrudDrGHDPnADWbQxK8uUfAhiDAc8Kq3d7emr78/v7+AiAI/B5EYEYKhYJpymGquHecu1qt2lxiR55NpLvTdru1rjy+G5gk00ljI5wHX0YjzRgRlEh8guCkgYflw/6DILC9Lfjdcrm0vT1Y9geksCt03TzfeDw2FhO7wx4BEc+CAQYkV9gjic5ut7NrHI9H9ft9e7btdnvBfrOhJiAFw8ymoTw7Mii6g9HRzK8GMJcAFysV+L2PHd6+fHvkw+FgduL/j7hAXEKihG3yfNwbrCIA4lcLYL+JA5wP28KfeBlIjg87EoxMMDLByAQjE4z8+WPk+46fvMLFgKDN9O1oCe48LINKIIRtwIiks9bdM31+Kd0PAi1iuRYBi9/DmGG0vmCPQMAkMZAEJpyLZX0c7Hg86vb2Vv1+35aW2RxROhX03d/fWwEqoOQZwlqtpmz2tJt7u91WpVJRtVq1NrDtdtucgXElWHHPXictyZiv6XSq4XCoMAw1mUw0Ho+Vy53a/NKpB62zB2b0xdvt9iL4B0Fg7BCdhdhgkMANyONAxWLRWBfYHYy1VqtZMTaMHwE8juOLgkkAL5s9da0BVEqlkjlkOp22ccNusAcYv3K5bIwqS9gAOVILzyr7+b67u9PhcDBdM9dnHmDvjsejLf3zfE9PTyYdoXYAn2AefE2DT6oAdj/3SBQmk4ldv1QqqdFoGCOXyWQsqaNtrHTuUAQzXa/XFYahoiiyInD8BH9er9fW0Qv5Sb1eN6ABmABfzxwCPF7WlM1mbQ8NnhNQKRaL5guMC+MgyXT7+CtzASMKsBKUYfmoReH6lUpFqVTKnov7pgB3vz/XgxAHYP/YrJRVgul0amMLaLFHyXK5tKSVBJff+ZgpyVYxKCrnuYibrDp4dpOkBT8kbvmVDZI0YioMsHTeWNLHO9h+4oKPwfz9pwAlOf7wSDAywcgEIxOMTDDy08DI961yfRRJITeC0aO1JCgQWDDmdxk1Bi+TyVjAQDvKA+H4DA4sgmd24vhcjBwEwcWbr2fpPIOE4WI8nBtGgLdrgBKmYbFYqFKpmAY5kzl1arm6ujLdOcueGAKGg0N2u13T0t/c3KhYLGo0GhnQzOdz3d3dablc2u7sqVRKYRja98IwNDaRMYZhWK/Xqlar6na76vV6tnEggQ2nxwELhYIxK3RwIkDH8XlvAs+YHo+nwk863MCwSTLHZW44JwGUMcOpuB8cNJU67c1AYTDMMP9GZ75YLDQej7XbnfbsIGnh8zgv+8yk02kLDMgssEUkCLAoQRCo0WhcMHvUKwAMzAvP3263dXV1ZeD76tUrhWGoXq+nzWZjbWIJEj7QwPaQFMH2sjs77N58PlelUrnQKmNfYRgaA0XHJAIKiQy2OZ/PbS7wH5g2zwoiKcAnNpuNMepIBQASmC4SPdhGgiqJHn9T7+HZJ+LGbrfT/f290um0SYeokeBzxBRsgda62Cf2zFgAMGjvuVeCKoHeJ7okAJvNxqQwSF6oZSFWEXi9lIFEFHDwIEvsk3RR8wLbhn+QfDFf/rsAA4eXkBEjsfF32Th81McCP+/Y14+RTCTHHx4JRiYYmWBkgpHMWYKRP2+MfN/xUV64kCDAsOFski52jobF8iwbxoBjck4GHjYQA+DzMAoYNuwDLB6BiUFiqdEXCrNkzyaCMEbH46lTDPINro1DsefEbrdTq9Wyc08mE7XbbSsAxVny+byNRbVatRanABUBFiNik8tKpaLFYqHb21sdj6ed1AmajB1ORIBstVrW8vbVq1fWthYjJCDyXKvVSk9PT5rNZgb+bMI3m80MyAjmgDJ/arWaBc7ZbGZzy74HtPCEsUGf7fW2+Xxe8/lc6/VaqVTKmF6YD5wH+6CA83A4FY4/PT3ZXKbTaetSBCOKUwKAURSpXC6r0WioXq9fBHS/MR5sI+1zSQgICDxHrVazsc1kMjZmnU5HURTp+vrapBOAJAEYW2cujsej6ZpJfPxmj5lMxmwOtkySnQ+fAQB7vZ6ur69NfoKkJAgC20AUAAGwcrncRV0GgZjP8oyeMaJ9MddhPAjA+XzextoztfgwDJpvf4wv7HY71et1VSoVtVqtP7gvWhYjlUBmBauGrxLc6VJFjPGSAYIpoADLDHt4PB7Nt9m1njgDkwxjjL3gp/gQMRAfBnCYY2IpiR3yB7+qIMlAilbI+Ajx7ng8WuLgmXqYPB8f+ZkY6pNr5pmYnhwfdiQYmWBkgpEJRiYYmWDkR5MUspyNsRF4/ZKfD9YEBgKxf+M9HA5m9FEUXTwkD10qlazlKQDxbktIAhiOydsnEgSMiME7HA4XhYSwE3QP8svZ6NLX67VJAAaDgTkyAREnoi3o4XDq7uQZNMaRJWkC1eFwsK427JXhmQbkIldXV8b8wEhIsqVpAr1nNAj6nJMd7aVTAbPX3wKOsH+wlrS93W5PnbXQQMMopFIpa7cax/EFU8RyNsGCOUA7ThE58wX4w8gxjvv93tiwxWJhQQwA4dx+eXgymZid0q4YPTfsKLYJC4htMX6cE6aOlsjsIXI8nrTV+/1e4/H4IpFip3qCQDp97pZEUCOwMpfYEucBcNipHpmDt2NAEp8hAQCgCW7z+VyPj492Ds96E+hg2GBUYeoANcaQgIONEqhgakulkiU99XrdlvQpwgUcYacpSI+iyFhB9gja7/f2/MgD0um0tRRuNBrq9Xoql8vmN+Px2BIYDn9dzsk8Y0vH49FYc0AZwCb++ASJ+8GGJV1IdRgXYqekC1mMZ1gp1mbuYNM8yBBrScr5m3hAkiadW/pyf9wvYAUzGMfnVsHcKyxjcvz4I8HIBCMTjEwwMsHITwMj37fK9ZPR83g8ms4zCAIbXIwfo/MDQZGq15cSjDabjbFXME+0WAVUcECcAQCCPeGa/J5AxNKsfzsFODA2ZB71el3r9VqPj48ql8uq1+u2PN9sNjUajdRoNDQcDm2PAtgGv3M50gikIHTUORwOur6+1uvXr+28vV7PurkQjAuFgrrdrr2Jt9ttVatVNZtN3d/fm1wC46EA+XA4WMFzLpezzxyPR33//ffm0KPRyJipRqNh9438AelKtVrV9fX1hXSDZeU4jo3tYQ4IatnsZYE158SZ6LTkl+IJXHF83tsjl8sZI5JOp21vFpITnAMpDC2T2T+FwmFsI4oidbtdA1m03wD34XCqO5hOp/r++++N+YD5DIJAtVpNu93OluFJAkajker1uqrVqhaLhR4eHvTy5UsLyiQK2B41AYVCQePx2HzA66SpSUDyAQvG/JPAcT46HZGsEHxh1rGRzz//3PTpsIdv3761RMPPJ8lKuVw2hgjGFh88HA4aDAbabrfq9XqaTqcmIxiPx0qlUjZeLOMvFgsdj0erISARGwwGyuVyajabyuVyxjJjBwRnNkZlo1CkC4BUPp9Xu91WFEU2dzCfJBmwgbDg6MBZHchms8Zmf/HFF3r79q3FlVqtptVqZcGXuhVAEAY4
n8/b3Esy4GXlAADKZDLG4PlxguGG/fTSMPyR50YmQTLBZ4mhJAGABSBBLCJ+AmgAUvLC9eFHgpEJRiYYmWBkgpGfBka+7/jJ6ImTLRYLeytmwnB0dM3H49EKPiUZs8KyOv9mEDBilghh/xhIlvq85lmSJpOJOVu9XtdoNJIk68iSSqWs+A4mjyCPLrXT6Vh7W34fhqHS6bS192QvEVrEeoedzWY2gQ8PDxfOh56bXcyRHbBkTNDjXoMg0Gw2s70oFouFer2etR3NZrP2HDApsGTD4VClUkmFQkGtVktRFJkhSbIlX3S6OJGXihQKp/0Unj9/bgwOIAGAMU6wODA2m83G7pu5Yl8KGB4SBq+d9jbg7QSpATpt9OCHw0GVSkVhGBqTSfvQ+Xyu0WhkY8Jyfzqd1s3NjZ6enmwZmWJbHJLnAazYZ4L/f/v2rdrttlqtlhVfw/jC8jDWFLI+PDxYAIzj2AqMmXPYMTZrJNgByGz4OJvNLphSAtJoNLKxI+mCWfIMNYkBDDS2XSwWjcGCjeQ8BDgPVMy/b9eLpGWxWKjb7do8TKdTK3qH2abLFPeUTqc1mUxOAer3IMk4hGForFocn/b6QFpDTICp2u12Fww9sQUmar1eq9VqmU0Qr5Ag7HY7XV1dqdfrmYSoXC7r7u7O5hWglHTRGIA6hP1+r4eHB7NP9nchCSZ2Sef9jqrVqgaDgTGoXtYBUHkGmX+TjCFRYr8bZBUw1iQUMOT+JSqdTl8AHjaK/76PvUuOP34kGJlgZIKRCUYmGJlg5EehK+M4NvZhNpvZEuN+v7dJZiJxSoyOgMkDEkx4o2ewCEBcj+9JZ8NmczUmh7dgAjJBF6kCnWQ6nY5t/ocDwTx2u11JMh20JNtxm+Xs0Wikv/qrv1KlUtF8PtfLly+12+1s74/ZbGbnxODL5bJub28Vx7Ha7bYymYx++OEH5XI51et1Y9FwYByC693c3NiSJuAGm8GGlQQ+jIsNNykYRt4Bc4d2ljECZCgwpRC1XC5rNptpvV6rVCpZNxuWv2EHYGuLxaIFkqurK6VSKWNmGUevCUbqApO3251qCADUer2u6XRqumRYhuPx1O53OBxeaLhhGZGA0Lno7u5Ov/jFL4wZ5vsENYJSuVw2lhEJBCBZLBb1+vVrG9M4jvXq1StNp1MLdt1u1xgzz55gz/gJtoyMwncbAkikU6BiM83dbqcvvvjiQk6EXINAQY0HNQfo6ak7ADxgSAlMSEW4brPZtG5ctVrNwGk2m9m9p1IpdTod89mbmxvVajXNZjNjuCmY5/y5XE7D4dDmErkUMhBkLchYkNP0ej1NJhP98MMPSqVSevXqlSVnSByIF5vNxgAf+ZRvndzv9w3IHx4eDASJLa1WyxjOKIo0n891dXWlRqOh1WplXbuYX+o8kC6k06dC8+l0qmazqXw+b53LkJ0gbYqiyPwBkMOOOQBH2FWSL88EY1uACD5J7Q1Ay9wBKh6QYf58YpscH34kGJlgZIKRCUYmGPnzx8j3Hekvv/zyxyHG/3D8y7/8y5ftdlupVMpaQOIAGLtnYGDCCJRoMjF2ltN5+4fNQk/r9ZcYOYV6gBL/xz4BaF09mB2PR1uSZvJZJqxUKppOpyabgA3c7U4dfl6+fHmxrDufzyVJnU5Ht7e3Fpzp1AQTBKBQiNrv9xVFkVqtlgX8crmsMAztvBgHXWwAPWQTaLRhljKZjAV1v1wahqEOh1N3KeQdvNHjoEEQ2IaK6/Va3W7XJCx8niVzAiPaXK8ZR0tMQgHTOZlMTD5Sr9dt+TqVStlmiyQC0ondRcM/m80uQG65XFpAfHh4MFbDL1lTu8ByPg5TqVS02+30n//5n5pOp/rVr35l9pfL5WwfC9gwmKP1em1MFsvf3O/xeNock+DARoqHw0HPnz+3+4rj047xALFPEghwy+XSwAiGC9uMokij0cg218QO0DBj9yQugCfnyuVyVryL3eKPJFjeJxqNhgWR0WhkNQIAXq1Wk3TeAwXmko0tCbYEKcZ0t9vZ+aifIAllA9AoilSpVPTXf/3XWi6Xxp4T4I7Ho/7t3/5Ng8FA3W7XCp/L5bJ1QqP9M/aKbT4+PpqvsyEqyZJ0qu3I5XJ6/fq17UfE+KfTaXW7Xd3e3qpUKunFixf2DIz3eDw2WRNsNmOAHQGKjB3F08w13wEYWfnwyRPxFnaPWgfADfadZInYCEvHOPJdr5sHUGCWpRO7Nx6P77/88sv/+5OA4xM6EoxMMDLByAQjE4z8NDBysVjoH/7hH/7PH8OCj1LDhWGm02nTneM8FLtJ5176LAsTgNCf87aI3ILACIjwpslgMkC+WBM2AsNgCZGl/TAMLZjwhkyQZt8EgCKXyymKIis8pjsMgWw6narX6+lwOBiTV6/XzYAxFNhImAo004PBwJb4WXIFtKrVqulsARMCHEu63W7XPlOv1zWbzUwmQtEnb+o8PwEQeQIyjdevX+t4PBq7M5vNDERHo5Et4RMsbm5urHAYvTrzSBF3JnPqYERNADvZH49HSxL4vi/oxpCLxeLFfiYYPEv3LPezzwqsDKzYuwXijUbD5iKdTuuXv/ylbm9vdXd3py+++ELr9dqYouFwaIkP7CJL+8zjfD635AHpzWw2s2JpHHs8HmswGJjt0s1osVhYUEAClM/nbal7Op0ay0WStdvtrEsUPoRGvNls2p4sdD1Cb0wnJxg9mHFJ5h905kFC4BMwfC0Igot9Zfb7vTqdjjF/URRZ62bqDDabU2vp5XJpkiXmC7+C/ZxMJioUTht3djodG38CKvfS6XT09ddfaz6fq1arWeA/HA42l7BTdEY7Ho8aDAbWNhdpA6w29RQUvQMaj4+PJrfh3vCf7fa0Dw0JNONJQrdarQxE8VUYeZIwWLXBYGDyED672+3s37DL0pmVg2HH1kjuSE6QSMHoSrI4il15aYm/lk/aWV3hHMnx448EIxOMTDAywcgEIxOM/GiSwkKhYJrhavW0CzZGC0ORy+VsiXS/39uSKHpiGC1ABQYKrSRG788L0MBc8PZdLBbN8ZFHsIR7OBzMKGD40JWjMZdObEWz2bSiy+l0qlKppNFopFqtpiiKzFkxlkqloru7Ow0GA9OWFotFY3bQ8rML/eFwKjz95ptvbJk6nz+3psXgkXHEcWwsF8ELYIOlK5fLmk6nF+wXrUDRIOPQBHE0zzgZgJLJZPT8+XPbDBN2r9FoWHcX2NNms6lSqaTxeGxgjEwDmch8PlcQBBqPx1qtVgZ0sDMsIx+PR3U6HdPmc1/YQz6fNwkGS9O5XM6SGZwMRhHn4dzlctmYlR9++EFXV1fGMnqGKZPJmKwAW4PVkE7sFXKWp6cnlctlFQoFvXr1St9+++2FphsWkDoKNNaTycQY2NFoZNp6DyIeFGGpK5WKsXHL5dKkL94nGS+YYKQSo9HIEjISKALOeDy+0M0DJH5fGj642nTVAAAgAElEQVS/252KSpvNpjFkgONut7MkajAYqFQqGasPIMPuEiNgK+mqNJlMLLD3+31VKhU9f/5cq9XKuills1lNp1M
bx5ubG6XTaevIRS1KGIZ68eKF1Yjc3NyYPcKO00kOwMjn82q1WtbpCWDgWQFkdP08N2NLknE8Hi3pkmQJ3d3dnUql0oUdsVJBYwSCfLF42tTVy6h8YwXiIbEykzl3UuJnrkGSzXl8QoONkdhg71wnOT78SDAywcgEIxOMTDDy54+R75Pd/+S28AyM14KzDIdTwijx/0gaYLcoqiMAMThoq9FdM7AsM1K8y1ssGvZKpXLBUuRyuQv2K45jK+bFyWEHMBAKHLfbrbFXtCWFrSuXy/ruu+8kydiaYrGoTqdje4PwzARAloM9s4JDRlFkgALDB5vJMi2Tjw53t9tpOByq0WjoeDzt9fD4+GiBj+D4+PhohZc4AHPV7/dtmXm/PxUw8n2Yjslkot/85jf2f5lMRvf398ZMPTw86M2bN3p8fLRgTnvS1Wql4XBozkZBLqCezWatqw57f1xfXxsrt1gsDKiKxdOGfm/fvtV4PNZyubT2wyQSJC2bzUb39/cWpBg/zjsYDGxp/9e//vUFkwaDCuuEFj6TyVh7Vhybnd8BoHK5bM+63W5VKpWsZgB9+36/V7vdNjBtNpuqVqsGNGEY6ubmRrlcTnd3d5pMJsbmUYAsSY+Pj5JkDDPXhH2DIfLFqD7IwqjToQomieV06hAmk4lJDPBPlvABdgIe491sNg3MKc4mSdztdpZM4KPYgAcrmF9YcQDmt7/9rYLg1AULhrBYLJp9MK+wdg8PD/p//+//qdvtGisXBKeOWWyoyn4sYRgqCAIDAnzshx9+0Gg0uuhShk2m02nd3t6aNpygS3E9fl2tVu2ZgiCwfV2Gw6HK5bLJZVKplM0Pc8dzkhSSLKBLR8JCwkQba1Yj8A1sA/BhTjw4etaQGA8wJceHHwlGJhiZYGSCkQlG/vwx8n3HR3nhotMIgcKzLPv93pxOkr1xl0olW46DQUBXzIAQiPk+7BoPDLtBIRvBHOceDocXGtB0Om0dT2AWJJlGlmVDnAkHbTQaprcGMFlKfvbsmVarle7v7zUajYzhe/78ubrdrgqFgj3T999/b8/QbDYvJB/IApbLpR4eHnR3d2fFi8hLVquV6Z4xLJa9WVaXpLdv3xqjQoCBtcHQ9vu9FR8ioahUKhbkl8ulXr16Zc+K9OXp6UlxHCuKIg0GAwNKWITD4dTydDQaGUOKcTKGbMRHYvH555/r2bNn9iycC1a01+sZU8Gy8nQ6VbFYtIDb6XRs7mj7iyOSAOAYSDWwGVjWt2/fqtVqaTKZKJ/P23I39w9bxnzBekVRZHIdHFs6FY63Wi0DNa7rl8JJbpB2wCqS2MKMwnLV63WT6LCrPInwcrm0ZX8YF+Q6Dw8PWq/XGgwGdn3un2diyT8IAptvitO5NwJiq9Wy+cDPdrtz9y+eG2BYr9cajUaWAAHKMHmAWKVS0X6/V6PRUC6X02effaZU6lRA3+12Va/X9d///d/GZvtWxuzHg4yCehCYZ+ncPhYw/uKLLyw2kXAQuwACEtl+v6/pdGoxbjgc6vvvv9fXX39trbHZv2a73doqBDp/5CiMCVKJVqulTqcjScYsAtTcLzEAEIb99OCLTIrifUl2fWpTuK8gCCxRJaEgjkrnomCkb7B+nDc5PuxIMDLByAQjE4xMMPLnj5G8uP2x46O0hWepF0P0bNvxeLSlT252Pp8bwwbLQbCEtWOAJFlwY4IZHK+bJSAAaMgKKF7lbRe2qF6v24DCcvj2osgf2FxyNptdLCsidWADOZb/YR0IELwFE5QIgqlUysACZ81kMtbqs1AoKAxDYzg4J+CChno8Htv+CcViUY1GQ2EY6s2bN+p2uyZRSKVSF4GdcZvNZrq6ujKWCD1qtVrVmzdvNBgM9Dd/8zeKokiNRkP9ft+eA/kHLAJFpcwtwIF0YTAY6Pr62sZkt9tZjYIk0zfncjnrikRg3m63pldmXtm7IY5jmweciGBDsKJbEMwI11+tVspms7q6utJ3332nTqejer2u4/Foz0Vik81mL9hb6RSc0um0aX4JuKvVytjQ8XhshbowKXRRYqyZ3+PxaIkW0pp2u21doPhsoVCwJf4gCCywc+3VamVSFuYKliaOTxtgUqyOH8LqwE4RVNLptBXIEkT9Mn0qlTJpDmCOFh1JEnvvUAgOe0Wis1qtLFHkfhjrp6cn03v/8MMPVtROgTjPT8B9/fq1vvnmG/3t3/6tvvnmG2OEj8eTvrvT6ZgM5sWLF6rVanp6erLgDAg9Pj7qeDyq2+1quVzq5cuXarfbSqfTZpepVMqSxmazaawyKxS1Ws3uVZIVA5Pk+cQahvL29lb1et0kWaw6IJdBe+/ZU4q+CfjMDUkOcc6zcCQcfIc5ITYQ74hpyEWS48OOBCMTjEwwMsHIBCM/DYyEqPhjx0ep4cKZKBrjjRfHwdHQfQJA2expgz46oDBgFDvypouunDdynJ2HRXvp34xhHihwhWHg2jAbBETfsYZgydv8YrGwVqi53Kk9J4ZAAKE9J7uDM+EUkwKW6PdJXrLZU7cm9ONovsvl087fvMFHUXSx5Nztdu1eYUmy2awmk4mCINCzZ89MroGzAkQwTKVSySQKdHKirezt7a12u1M7VViXzWajTqdjwEQhZqFQuJAB+JoCpDTNZlO1Ws305pLU6/UsgOFszM9gMLioHSApKRaLqtVqtsxMNyE66zB3hULBCm494yvJWMt2u21L4tjVr3/9a/393/+9sbSPj49mxzBeJDG0Mubc6Ln5bKlU0nw+NwAjaeE7+AUBDPYTFg3NPfMnSe12W3d3d5JO7PWzZ89sE0lYXFgwvkMRO0H5+fPn+o//+A9L0ABvZBvs/wLbWyqVrGg9mz1tasl38UOuD0M+m81sPvBREjn/nN1u16RDzCcsNEnlfr83CVAcx8aYIymAoWdse72evv76axUKBV1dXanf718ETupNmF8SMnTnJF1IF2CFb25uVCqV9Pj4aHEslTpvfIisg7kmycU+iVMkMbTcZayxGWox1uu1JZ4k4J5BA8SxIVYg0K8z9sQowJaEmfhLbQCHZ4D52TddSI4PPxKMTDAywcgEIxOM/Plj5J/1hQsHQ5PsCx6RJvggmE6njX2CSWB5kN/FcWxMB7pXX9xGUMEoYY4AksViYcu8y+XSllWZkP3+1GcfWQWBjEmAedzv9zbJ/I6OL+v1efPCVqtl3yGo0TUJVhEJCcWNFBN3Oh1b5gawJpOJ7u7uTP+KpOHq6krH40lrSwFqvV43R0f3zfVms5lubm5M1oBUghaYMKoATDp93kF8tVrZ9abTqa6vr+3cOFmn07Ei0XT61AHm9evX5mAwC8gX0CCHYWhFw+jtYQ0BWBhU5lU6b8p5OBysSxEJwPF41GQyMYaYa3Nu5hBbg7nk2bGT4XCof//3f9ff/d3fWSChSBUmmOV7pA6SjB0GnLhfdNMAMCBFogKzXC6XbRyxdZ5/MploszltKsrzsrwNy4t9zedzbTYbSzZgoAgmPBMyErr+kGgwxmi9CcLMKX7ipT4c1Fdwb74WBUCTZHECcEBDT5CD6ZzNZi
bT2O/36vV6NpcwgiR1JGGLxUKtVkvr9do07J999plGo5GWy6XZKC2d37x5Ywwy++wwz8wB0phCoWCJL2wi84lsg+ci4BOE8bdarWY2xPOj10fTT9wEcBkT7NbX8lSrVS2XS2Nive8B3MQqkmHqW7y/IXfz/sf1Ye08SCXHjz8SjEwwMsHIBCOlBCM/dYz8KJJCjIs3Pem8JO/f4FlmpGCTz/i3QgaJg5asPARGTPCDSYFVqVar6vf71pmIzxP00FmWy2UzUu6PScKxfDcfimm5FnIM3uAx4NVqZfKEzWZjhaAU5G63W2WzWQ0GA223W7169cre3ne7nR4fH7Xf7619KROJvIOCwkzm1DKVLi43NzdKpVJqNBp6eHjQcDjUF198ofl8bm/1vm4AjTAshCT7eTAY6NmzZyZfOBwOGo/H2u12ms/n6na7ajabVqxdqVQM6MfjsQUugg/zBxMH49BoNIzRepeJYFkXR8UZPUOYSqVs/wiAWDqxWsw/duWXmZEyALCbzbk9LO1Nv/32W11dXVnbVp9UeMc7Ho/2rOyZgn3DHmLf2A1AB6MryQACG4elYfPGw+GgMAxt741Wq2UdfUhioijSV199pUajYWOJbAf/pM0xdgnzCtu0XC7Nlwh0fp64P5bQGQ//HGjlsS8SOVjYVCpliQ0tXGFnGc+npydLBmAzy+WyHh8fDXDRgTPvfk5evHihIAj0/fffK5VKqd1uW/JEMvbFF18ojmPrsEQBLqCHhCCTObVupsPTfr/XdDpVtVpVEAQWL5CoMA60wZZkUofFYmHSDor6gyCw8xGr8CeeCx9FEiHJGHlsE/BDehPHp6YMy+XSVi7wIVhEDkAPG+T5SYxIFnme5PjxR4KRCUYmGJlgZIKRnwZGjsfj/xELfjJdicNjeCx7wmwwwLAxGCGTwu9wIN7mYSEkmWFjiDgvnYt460d3C+NBEOAtFUkE4Lff760QF2cjaGHUAASDu1qtrA0n8ofD4WD7PKCRhrXxGu5Wq2XBBJDlmQeDgd68eaMwDG0jvDg+dYqCASHIsuzZarV0c3NjXWZgjpBgMB48H+wRzwMQM19IF1qtlkkbCBi0v6WQOQjOBY3T6VT9fl+TycRkEdVq9UImw/wRNHyR5mKxsLoGDB6WkRayzLskCyywTgRB7hvmlPHCxrycBlaC7kokGqlUSs+ePTMdfhCc9tTAZgBigr1njBkrlsPpikOggc0jYYKdXa1WVqjrGcH/n70327HsOq61x+77vt/ZVCZZRYqUdWFBgC/PheG3kgEeWH6J8zT+b2zAsGRLBE2yWF1WNrvv+/6/2P4i56YpmmVSN6y1gAJZlZk715ozZoxYI0ZE4HxwkK4eG0Bj8CEp9ng8bt3GJJk9HA6PBeTIGdBtZzKZE1vAedzf3+v169cnmmX2krVn1ggOEm08z8jaJZNJm60CMzSbzU7YVb/fb4wwv4uCXr/fr1evXplEh6CEswaThz6eYK9UKqnb7Wq1WlnB7mQyMaYWOw8GgyZhcO+PIMqtGchkMprNZvriiy+sMxXO2mX1Oad+/2NXOs4vEq1A4NjpijOOr2O9KaQ/HA6WycAXcb6xK9aSbAaSC4AI0AfAAGHu12Vj+Tr+CUbPDfS964ddHkZ6GOlhpIeRHka+HxjpSg+/ff0kNVwYDkWBFBdy6NBvcpgpGMZxuG/JsHmwgPTux+GhqZRkiwEzst/vTdMLG8tMDEAul8uZXpg3ZUnm8F2GAjDEyZJqjkaP8x06nY5pktfrtbWCde+TTQqFji0yMXgYAdgEd9hct9tVNpuVJGu1i0whEAhY4SVv59fX16Yv73Q6Buo4IPbF7/dbVxn+DmMCuxoKhWxY5XA41GKxsA5IOAXWLplMqt/v20EB/GezmWnPAQUkJgAD6W6fz6dSqWS/HxYLx8egOpgQnHggcJx30m63zflhB5KMhSCFTlCw2WyMdQGsAHbp2JIWrfXDw4Oq1aoNo8RZcLgAEZe5woZhPRaL46BLdN5IA2BK2dvFYmG1FN8OYpBTMA9jv9/rzZs3evbsme7v71UsFo3FOT8/Vzqdtg5knL9cLqflcql0Oq35fG4aa7Toh8PBzhkBDADKjBcAibS5z+ezfeQ+OdN0GpvNZiZLgZGCjQLk0YRTgM0gVXwATGen01GhUFCj0dB2u1U6ndZoNFIoFFKpVDI7xMZ9vmOxLXNbGO54dnam/X6v4XCo8XhstkSwGQ6HT5w3bXX5+nQ6tba42CQzclarlQ0pBfSwV4Jm9jAYPNZO7Pd785vT6VS9Xu+k8xbBMD4Oto71wQdyP9gmPpmaH9Ye0ACQ+F4+g+9z5RaSrHuTd7375WGkh5EeRnoY6WHkzx8jeSH7rutHZ7j4cH45i+syZrwFwuSxKJIspe4yAnR7wSGTtoMN2u+PrUU5vNzHbDazwkBYvXQ6bW/IOBo+w2XrkDe4xYW8ObvtWdl0Cmdpg8rbP/IRN3W/2z12ToLpGI/HlqaezWZ69uyZYrGYdW8BdGBzXAaA+yctjzP0+XwqFouSjk6MTjmAlJvyJrXK2rHGAE48Hle73dZyuTTnH4lE1Ol09OLFC9NVA8gMk0ylUspms9aBZ7c7Dgd88uSJstmsGSzacpgEgAOJi5suJrXLQefQ+P1+mxDOoEycPSwOuniXzeIQRSIR1et1O2g45OFwaMwazFi73dZgMDDpAoAKYCYSCZVKJQNLCnNh8wAwVzZC/QSyHxgrv9+v0Whkkg/Ah89er9caDAa6u7vTarWyAutIJGK/T5IFEXQHogsVzmE6nVp9Ag4H4OF8xuNx2yPAE5ZvvV4rm82qVqtZoTgFtzhbpCsUEQ+HQ5M1IU9gLTebjQaDgcLhsLHMyCpWq5XdF0EP4AfIw5gTePp8PpMOTadT/elPf9Jms9HV1ZWKxaLJOhjYyiwfzhtsMYwtdkIHqVwup4uLC2PFJ5PJSYclbM6VNMD2E2gfDkfdN0xcr9ez8wvzRsCCHh4AYM9gXgEfAhF8GIEZvs71fa7EjYvADF9NpgO/6l3vdnkY6WGkh5EeRnoY+X5g5Pe9cP0kGS70xxx6CuZI8/EWSxoVZgVnAnMTDAZNNy7JwEl6dHiutleSHU7eOF3HTjqVtK5riGhQ3VQy6VKAhWGCOFQ0mn6/3wqKMTDkATAPgUDANpMiYNgYF1BJ4/K1ZDJpDAHdXGA20bNSvMp6YEDr9dpazrZaLc1mM9PiI5/4droVsIYdSKfTymaz/20yO4d3v9/r7du3isViOjs7s3WCsYANI9XMYEzWgRQ6TACHD3sBwAFkUsAAL8yW250K50gwQqobpwCTHAg8zivBWQD+FFTSajUQOLailR4HMaLzJlAALGEcYa0oTIZFXSwWOjs7M4CNRCLmeGCG2WNqAehERGE7BdBuEDEYDPTxxx+btpmCZHfeDA4DW8BeCoWCgTZOajweK5fLKZlMmozIlX5wv7Cr8Xj8hN3C8fA92CmAn8vljC2UZMEIPgD/AZuJnYbDYbVaLaXTabMb5Dc4cs4j526/P866Qc//wQcfWPvl/X5vA
SL7Pp/PzUFnMhl1u13TtRMQofVOpVJqt9sGmO6/w37C2GPHOHDOLP8G6x0IBOzfkVyxHm4Nh1vvgE1ixy5bD0AAPPg5V95G1mS3O+r82Q/uXToGX8g6/icw8a4/f3kY6WGkh5EeRnoY+fPHyO+7fvQLF6l2DNaVRsTj8ZNCWgybN1aYKRgoDJcFAIhwCrBa6HRhm1gk2AeXwej3+8pmsyYxcFk20pT7/V6j0cgWH2OXHluwUvjKm7DbeYVUOo4BY0bXDesCSDH4DceOHODs7MwMHBkHrBUGAVhz2HBwGBCHzDXQ4XCo9XqtTCZjzj4UChkQoLmFaeFnarWa9vvH7i/r9VrpdFpXV1dWKBsOh02KglaWA4rTcQulKWZ+eHhQrVYzFpYLo+cZMH73MAIyru2hM14sFlYczAyPSCSiSqVijBCHBafEIYEhAbxms5nVDJTLZQ0GA9sL2qLiVN16AwCQ4BS7dnXaLoDA1OTzeU2nU93c3Bjj6qbk+Uycm3voh8Oh2TgBDc+4Xq8N4Pg82EDkMsvlUqPRSOfn51qv1ybNwLmzh0gzXHkNLCPdu3jeTCZjRegU91Kj0ev1TubCYKuRSESz2cxqBjgnFBinUikr1AccaEkdCoXU6/XMFwBET58+VTAYVL1et6COugR8CZ91OBw14c1mU6vVSk+ePLHADlkJAeZut7OgKxwO21mECWevYNLQrrOXaMj3++OAztlspmg0qul0akENRb/YiBtkwQJyRmHYuAdJJ6ww60kwhF92/TMsLbaGZAbQdzMg3vXDLg8jPYz0MNLDSA8j3w+M/L7rR79wwYJwqCUZGwALx0O4bUL5mqsrPhyOWmb34MB2oKd2Haf7tgm7BiOIcaBvZ4HYQBaY1Oq3Qck1jGAwaOwHP8fPSjr5bEnGHsFoTadTK2rE0SQSCTOGbreri4sLhcNh9ft9Y318Pp+xCqRFYfBYb9gDF0jQ7CIBIA3NvRUKBTtsSEvQiLsFkDBpGNlwOFQmk7FhkbT33e12Go/HZvg+n88OibuujUbDpr4D6KTAF4uFCoWC/H6/sVXYUzgctgODw8ERplIpY6/cw+LWMSQSCcXjcd3c3Fj7YKQFrEEikbA9px1oOBzW27dvtV6vValUVCgU1O/3TXvMAQsEAup2u7bfqVRKy+XSpBW0CMYGkN6wHwAee5XNZq2ugqAEUODMEbi8fPlSn376qYEkAzUBKZgdSVas3W637T4A8u12a0BCgAcQIKlA/uAGfsih0H7zu8fjsRXOJ5NJk0s8PDyYo0QOROqeICqZTJqzhlXK5/PmvAFNAjoAhsDNldxcXFxoNptZfUOv11OxWNSLFy90eXlp9sgFqGJvnEWCIs47Mqx+v2+a/X6/r8PhYPtPK1pXtw9ryH4B7m4wPBgMzIfiE5HMfFtuw+eylqFQyM4LwTIg6IIMhenu97MOBEz4VEDF9fHe9cMvDyM9jPQw0sNIDyPfD4x01+rb108iKZR04mRJ5eGMMMxA4DhALBw+du4ZDAZWGImEgQdkgfnDIsI8SbLP5oHR27IpvHXicF0WcLlcWuoeFo3UJYPVOJAY+m63UzabNYaQ4mMcO8YGI8hzrVYrO7ykYykU7Xa7BqA4JJehYT1YYxyeJDvsfA1ns9vtzKkcDgfVajVJR4fBQalWq9bJCC37mzdv5PMdC3QHg4ExQZVK5YQ943e/efNGhUJBi8VC7XZbyWRShUJB0WjUZAOwmTxfOHwcIBgIBDQYDHR5ealOp2MBAmtJjQAHkSBBeiwQXS6XyuVyVgAO4Lv63XA4rNFoZIdvs9mYBCSTyZgzoKVwMBhUNpvVbDZTOBzWxx9/bOtKgMNBhxGFnaELD5KMfr9v9sA9w2ziVNgvnhP74D4AcVd6gU1LMmkF600hOXYDqwXwAYTIGABU1h/W1+0UhV3TQQnmcb/fW/vYi4sLGwR6dnZmE+HRoiOXcYvqYeywEewROch0OpUk23fXEcfjcdPww8Jls9mTGUIMdf3mm290dXVlTBVSm91uZ7p6QHAymRgokSHA/qbTqfb7vTnxbrcrn+9Y1E0dSyQSse5x+CVaa/O9MIibzUbVatXOhd9/rEtIp9NKJBLq9/sWZCAxQk6BHAbWEGYPZhWfy+9FykLgia0RIGDjgBD3ji/HJ/1PDJ53/fnLw0gPIz2M9DDSw8ifN0Z+Hyn5k71wcZEuxcli1Lxhut/D2zlsC2n5zWZjm+gyaDgJ3sx5MFKHGCGHolAonLyFkqoMh8OWfnRZLxZwNBrZhsHEMQDSZfYmk4kCgcCJ/pZ0ZSAQOJF1cBjQp8Jk0gq31+spEAgYK4AmlAOM8cGKAozr9ePMFgCbA7/fP9YCkKYej8c6HA7WgQcjdjX2HK5AIGAsKwcRXavLGPJssCqAuvQIcovFcUgkxg4b0O/3Lf0Na8tnAp44T+4NJrPf78vv96tSqWgymdhhRzohHSUQd3d3KpVKpjFmPXGkFKn6/X4bVkmavlwum7Pb7XZ23ziPxWKh0Whk9h2LxdRsNtVsNm0+DjIiAg7a1LoMMBKV+XxuM0Ly+byBlvTI9K3XaxWLRRvqORqNlEwm1e127XMBPQpkmVUCew0zBYtaLBa1Wh1ntTBwE+CByYpEIsYosZaLxcLkP9hDo9HQxx9/bCwTgQDnnHPDvUoy22f96vW6SbAAYQYYss/sMcEhxfTlctnY6dFopOvraxWLRVtr/NJ+v7ehlgyMbDQa1iksn8+bDblyumazaZ8XDAatq1soFLLACF8XCoVMWjOZTOwcce+xWMxkMolEwoLc7fY4WyWXy2kymajf7xsjTt0Ea4AMAzvjjBCcE5Djb92aH2xru92arIuvAVjYIAG6d/24y8NIDyM9jPQw0sPInydGft8V+Oyzz773G/6n6x//8R8/y2QylpqHJcOJ8eaPobGIfC+AgB6VBcPR4+RJOfPGiTG4DBwHDYAKBoOmkSVlyqK6unnkGDgE7m0+n6tarapQKNim7PeP2lM2IRQKneiv+d2hUMjajNKVCGDj2TEcGDLezmG3kEYAqDgz1hUQdtupkvpPJBI6HA7WMYbDyRt5o9FQMpm0LkSwprRF5V4AQZjWYDCoUqlkQB+NRpXL5QywYVhSqdTJTJJ6vW6MHsXBMDiwCxwMvsa8EEnG+NBNCm0wBZoUMFMUPpvN1Ol0LKCJxWI2RBDGCl19MBjUaDSydr8cOFgngBr7RYLB5Hg6c5VKJY1GIwUCAXMEgcCxSJQaB7oK+f1+q22AJYtGj0MWb25ubL4KmnuceDwe12w203Q6VaVSUb/fVy6Xk9/vPxmoCUNTLBbV7XaVyWR0OBw0HA4Vj8f18PCgeDxuAdJmszHAwJZYB2pN1uu1tbJlnzKZjO1NOBxWt9s1BopggICCYuVUKmVBJTZE3YXP99ihiza3fr/fNOK9Xs/0+5w3WDECA+49n88rFotpuVyqXC6fAHAmk1G73dZ//ud/mj0iUYnFYif1GvgjbJcsAv+/2RybAazXa2PRuEdJJ75uvV4b40qns9FopHK5bF2ykKiEw2EbCBsM
Bu1Mw2wC2Pgmn8+nwWCg8XhsPpcgkgCdoARb3m63J1kOSQacBJoAyX9JYxqfffbZ//tRwPEeXR5GehjpYaSHkR5Gvh8YOZlM9Pd///f/97uw4CehK2E+ccZoq2E5cOAwCrzR8ne//3EaOmlnpAK8eTKfw73Q0gYCAUvtk+pNJpPmGNbrtXj+xbMAACAASURBVPr9vmmuccSkVgEf6XE2BUzDYnGcsfHw8GAsHm1l2XhJlt6czWbW8QmZAIeJTYJVQ3NfLpeNLUGfS1EgTJ6kk6JTQCkejxvj12q1JMk64wAOsAKZTMbui841dAUCkPP5vNUc4EgwSgwccIJBkmTgk0qltFgsjO2BuQCkASsOZjQatbaj9XrdZlkEg8fiTxwqQzpxljgg2FYOII6Q2R3McRkOh1oul2YDANdgMFClUrEWxNPpVMPhULPZzOaLJJNJSTKmZDgcml0TsMBcxuNxXVxc6ObmRtvt1rpZ8b273XFqO8/I1wksgsGgMUEEIsgy0um0yXmw9fl8rg8//NDOEXaCPAJ2tFwu63A4WIrf5/MZ63U4HDQej1Uul624d71eG/vY6/UM6HD+1B3M53Pl83kDR+QeBDOcDexOkp3lcDhs5wF72G63qlarkqR///d/V61WM5YzGj3O9snn8+Zoe72e9vu9crmc2cNgMDBgdWUyAAUBz2azUa1WU7vdNu21K0lxAZYzDKPd6/WUzWa1WBxnw1APUSwW7dzD7lLwDKtNMBmJHAeiutKuXC5nUgt8TDAYtEwE/oZuXDQGkB47gtG5DYAnCOd8utIkGF3YePw5Ugn2RzoFRO96t8vDSA8jPYz0MNLDyJ8/Rn7f9ZPM4XJT6dIjM8LbLXpxWC3e2mHmUqnUSeFcInGckA5bJMk+jxQfg/ZIndIhBkYBx4uUAXkBCyPJZpK49xWNRu2Nm9/Fxvv9x3kak8lEs9nMWuKSvq9Wq/Z70bPifChk5SDTAYf7mU6nGo1G5uAAUnSp/H5ACBkFn8cBicWO09TPzs6USqVUKpVUrVaVy+UUiRznt8A20A2IVOt4PFav1zPgGAwGxizwPMgU+v2+FVtziF12kWJISScHCpYJ/Tup/2QyaYcYQIHtASww7vF4bMyb6+zcYmkYZHTdyFM46ADkeDw2/S37zfyL/X5vXydQ2u/3toawaTA2rL1rX/P53PTxsEB0wcJ2CYDo/IU0p9VqGbBxj7BgzWbTHA3MEmzWarWyblLz+Vw3NzcGKG5A5Z6pw+FwUjy+Xq9Nh57JZOxnYHJLpZKy2ayKxaJCoZAajYY59fV6rUajYZIImPFKpaKLiwvbEwq/eaZUKmWB2mAwUKFQUKlUsu5ShULBirUnk4nJNMgYwNLW63VjY5HipNNpq89AhoJfuLy8NHkGzBvsJC8ZAOhqtTJQwB7q9boFdDwbARId27gPSeaf6BYFy8vnwrwVi0ULoJAAsWcUlBN0TCYTkxu5kin0/9KjrISuW9w/QOPKxjgrh8PBfhd24l3vdnkY6WGkh5EeRnoY+X5g5PddP1pS+Lvf/c7kEjw4Tps3XjbFTcv5fMcuPLw18pa9XC5NU45T9vl89ibNwUJbC2uDkfOzwWDQev0HAqdT7SmIJCXPv8diMQMdn89nTpvvAdhcrSZvxiw6hwSNKqlc2niig8bxoiNfr9dKJpPm9NA9Azowdjha7h2nMZlMlMlkFIlEbMgfTpM06WQysQnrrjOGFUBLv1gsbB9jsZgNH4RNdB0qbJ5b1ApzAMOQTqfV7/dVKBQsJU1wwfqgjeVQwLDBEgFGbqoZcB4MBvL5fMrn8ydMmmv8OFr2FVaPguFcLmdtYl+/fm3rRPEwAUsoFLK9Z8Ah6zQajdRsNhUKhfTq1St9/PHHVljMfsPWSjJnA9NGEIaTaDabxkJGo8d2z3RyQmKRTCbNNpDurFYr6+IDc/bmzRtjZkmhu8xlqVQ6GR7oBkUw65zXSCSiVCqlwWBgAQDMDoHiYrFQvV7Xcrk0UGO/2Nv1em11GaxJLpfTH/7wB5XLZVUqFb1588ZYNr/ff1LH0Wg0Ts4NGQFJBjbUbdBi94svvlC9XjdHTcczghAkW3SvglkF9JD8jMdjxeNx6zDV6/UUjUZVLBY1HA5PuiMh5RiNRhacAt6wpwTdi8VCs9lMpVJJsVhM/X5f+/3ewAawgOHjrLjrCsu8Xq/NnwKU0mOwjx+JxWL2d4IL7nG3250UFXuSwne/PIz0MNLDSA8jPYx8PzByNpv9WUnhT/LClc/n7RDAfgEu3Bh6TtLMOGhXawyTxeLAIuH4ACmcAJIGNpWUNFpOOt9IMiZnsViYU2SRYFVczelmszFdOhvnDnpjIjxvwRgFn03HJobtcZBwwDBx/C4OwX5/7PBye3urVCqlcrlsG1sqlaytKg7M5/OpUCjYYUfzulqt7A/SiW63a44EPTSsItrf1WqlTqdjaV0OJqCO8dIpikJFijCRgLBPrA/24fP5TA6DHIC9Yr9gQ9HcM5F+PB5bmrvb7ZpcJpFImN45nU7bWuM4qU84HA7W8haHwYGMx+PmYAD4aPQ4t4Hvw44BApx8oVAwaUK1WlW73dZmsznpRFWr1RSNRjUej60T0X6/t/WrVCqm+Q8GgyqXy1oul1Z8SoEqDC1McbVaNXmSe77Yq3Q6rVwupxcvXugPf/iDOSm0/LC0yJw4w1xondkbJA90nbq/v7fgiz2inoB/C4fDajabur29tc8KBoMn9SwAPLUCMKKpVMq02NhWNBpVrVZTNpu1s7RYLE5Y58lkolgspkwmozdv3qjX6+np06eKRqNWJDyfz0260e12TV8uHSVHzIKBmd5sNqbN9/l8enh4sCGvyElgEQF8/IKr+8dvIItCe//27Vs7W6FQyAZHTqdT9Xo9qyEheHCf2917/CfBGAAvydheAmOCNQJx7h9QJSMDUIVCIfX7fe+F6x0uDyM9jPQw0sNIDyPfD4yczWb67W9/+5d54fqHf/iHz2irib4zGo1aqjUcDls3FB4QYwJUWq2WstmsstmsGR9Oxe0YAjvkSjD4OynjQCBg3Z/QBdO5xa1BcGUVbHY6nf5vBZvodBlYSNcYmEO00hwKSaYxPRwOJ4xJOHxs9xqNRk90oYFAwNLykiwNXy6XrRVrPp+3AkuAFXZvv9/bYEHWHX03xoTTZHjjer02/ar0qK9HwxoOhy3dXyqVjI3DGe73e/V6Peu8tFgsLC3LZ2LAHDb0761WS71eT7/85S/tcBAw4BTdYIKDidOXjuzW+fm5Pv/8c83nc33wwQcnaW1YEA47em4Oi8/ns6/v93u1Wi1z0JFIRN1uV5JULBYtxUwqH2aZlDlBRyAQMB1xvV7Xf/zHf1jBqM/nU7lctj13Z9/EYjGTZlDgyqDCQqFgrVthYfr9vvL5vLHYpMVJybvdpUKhY2vfUCikt2/fGtu0Wq2MSeUMoeNnL5H1UOCNc4FppWMTbY9LpZIxXkiB0KAD7ofDQQ8PD5bSn8/nVnQuSV9++aWKxaI+//xz0/szNLHb7do6wzA/PDwYeAJeblvoeDy
uL7/8UtfX1woGg0qn0zo7O7PPogHBkydPdH9/r4eHB5VKJWUyGQuC1uu1Xr58aQHybrczCdF6vVYul1Or1bIaDzpo4eQBYlhApBiAcL/fN9lEIBAwWUO327XZONgJgSTgxnkLBAJWAwDQEGSFQiELwmBn2RNXmw5Th33ze/gd+NXBYOC9cL3D5WGkh5EeRnoY6WHk+4GRo9HoL5fhogMTkgYeEsPBgVO45nbdcVN9GAtv50gwWGTYqlwuZ738t9utTdeGLWJxwuFju0YOM06GhWPIG2wab+gwb65ml03HAWNkOC8KV/l8nDUbiBFFIhE9efJE+/3eJrJT3NjpdLRcLg0A3QGYgC8OBTBD2sEwOdYcZwETRCrZbYO72+0sFQ+TA3OZz+etw896fWyx63ZhwlGEQiFzsIAVsgIKmpn1wFT6WOzY0jSXy1lhMEPnXKaUe3YlE/v9Xslk0pwbbBFzKyQZgKJjJiCQZN8HoPZ6PW02x9ayo9FIb9++Va1WM8aOegWfz3cSvMDK7nY7PTw8mDOFsfv666+N3Ws2myZloKDXnYvi6o5hVAks+v2+drudDdFstVrabDZWbIs0BufD3A303KFQ6GSIJRpvSVZrsFqtlEqlVK/X9fLlS0kyhtcNDmFvYBJns5nu7+9NHpROp5XJZNRsNg0YACAKfWHLYA2z2ayBJ2xuNBrV69evVSwW9fLlSxWLRZMowHQj61kul7q7u1MmkzG5y3a7tcJ6Oj0Fg0GdnZ1pOBzqxYsXOj8/N1kOz4+zh211g9DZbKblcqlsNmsSB0B1MBjY+eEPzjgQCFjgC5MOK83zwgb3ej1jwe/u7tTv9/Xhhx/q888/V6vVskCE80BTA5w9gQa+NBAI2JmDwYc5hLkDgF2J2Lf/jk8j0AsGg+r1et4L1ztcHkZ6GCl5GOlhpIeR7wNGTiaTv1yG63e/+91nzNbg4NFtR5K9IbspPEmmW0WvyYMEg0ErWqR4D6YMQPD5fBqPxxqNRvY7WCicP6l6jESSgZfbtYRFprMNxsLXfD6fFdC5AyBJ81PEy7/DXHKfk8lEy+VSlUrFGDy//1gQO51OVSqVFIlE9PDwYIYGSxAOh61IlEMCAFIgSxvS5fLY0pPPxyGSDoZB5TCGQiHlcjlVKhWtVitLXaP3HY1GarfbxqQiieHtfjqdqlgsmqaW9eWe6MQkSaVSSZ1Ox/TWFxcXqtfrurm5MZYDJgzjJdhYLBambXZlKICNz+fTaDSyVDc2grOYTCa2l5FIxA48axaJRFQoFNTv9y0gAPw5eKT/W62WcrmcRqORDdwDQJn7Qi0AUpDlcml7TLtSvhfwgPXkGVOplM7OzhQIBHRzc6Ovv/5aV1dXJr358ssvlUgkVCgU9Ic//EFPnz41lok2yRRhz2YzDQYD+2ykO9Pp1Fgntzh2s9kY8FATAtsZCBxbyhJIUHuAjXQ6HQt8ttutBZKcDwaYck7C4ePgxkgkYgwdNSqr1UpnZ2em7ae+A5CHFYOJ6vf7xpLiL9Dmw1D2ej2TJ1xcXNizTSYTdTodCygBCpeJLJfLqlarJ8FeKpUyNhlwj0QiVkTf7/fV6/UMSNiLVqtlv4OaDVh2AiokP69fv1Y+n7cuV7CqBMWsMcEqjh9/RDZkuVyesHK0+PX5fP+NuePzCRDxg3zdy3C92+VhpIeRHkZ6GOlh5PuBkX9RSSEFwbxZoivlDRA2D3YNwGABWBS/329v0xTQdTodY4pgeEhVYoTozFlYJpAjRyAdz2JLsunhqVTK3vK5V4yDA0WHJpy3e+hJb8MWhEIh+/9AIGAs2cXFhXK5nG1oJBIxoEwkEtZxBUfHJruFthzO1WplrXUZcIcGHJ0+MgrYUNafN3nWAWBCksLvhU0CiBl4Rwce9oeuTuwlHYMAREmqVqtmkN1uV9PpVM+ePZPP59OLFy/UbDaNsYIBdhm37XarwWCgXq+n0WikbDarRCKh0WhkBbZ0F6pWq1osFqZnRnYQDodtoCRFuJLUaDQkSefn55JkuvmzszP1ej0dDgedn5/bzJB+v69sNmugiiOVZG1x3YLadrttRcPn5+d2rzCOfr/fugjhjKLRqNLptG5vb61+4nA4tqRdLBY6Pz9XrVaz9clmsxbUDIdD6zo0m8306tUr0+sjU6CYF3kGUpder6dUKmW2FY1GjSmFHcdGYZ9wfsykoT30ZDI5kUxJRwkRjPxyuTSGqVAomGSKYCeTySibzUqSzeBwnR6Odr1em6yHFsPVatWY8/+Sv0mSZQey2awqlYqKxaIx5ovFwiQiw+HQdODcF76Gc4LMQDqVBlG0i69CzsL3EvDsdjsb8Ih/JNPR7XaVSCSsk9V+v7eOUjh1np36EgJpnpkzBCC4ungkUuyd9DjkknPv3rvL1LOX3gvXu10eRnoY6WGkh5EeRr4fGDkej/9yc7hYBN4Q3ZQboMG/SY9MGywRbAfOjnS33+9XOp02Haz0WEy62WwMsNhYd4MpeuOQ4jTpXsTvgwVAMkERIW/nq9VK4/HYhrOVy2U73LPZTLVaTb1ez/TQrgHsdscWsZPJRJVKRalU6gQckH58W69cq9V0f39vwwqlRz00mnfSmJPJxCQRw+FQ/X7fhlgOh0Mz9NFoZIWWOGD2AHYFNmK9XptjSCaTymazVuRZKpVOmL7FYmGzW0ajkRXODodD+12NRsOYUaQm/X5f0WjUnARyGwqa5/O5Xrx4oUAgoGw2a7IOtNir1UrZbNbSxu76sJ7ZbFb7/V7lclmbzbGNK+nuw+GgbDarjz76SM1mU69fv7ZJ6KTXU6mUscOr1UqxWMwKeWGp6T6EXAD7415x6jDKOOm3b99aZy+kRQQhAMNutzN5Bd2IYH+wM3Td33zzjbXRpRsRuvxOp6N8Pm8FrrCGu91OzWZT2WxWgUDA6hxgeQlWKCiWHougGcCKDAI5SSaT0WQysZbVBF+DwcDYK5/vWMBODYTf71ez2dTh8Nj9B5Z9Op2ajGM8Hutv/uZvrNV1Npu1ItlYLKanT59KOspA0IXjJ/AH0hFUaM8Lw0mwNZ1OT4pqcabz+dxAKRqNGvjudjvT/nOO8HO0CeZ53Ja++DCYd5gzPn+73ardbhugwyjThQw5C0Gmu24EisiJCHD5DLIoBPCSrG2zKwEDUAjUkePw+d71wy8PIz2M9DDSw0gPI98PjMSnfNf1o1+4WDzAAwPB4fHWiPY7GAyeTBanEBTmjFQ2hxejkGSMRSgUsm4nOFe3uBanO5/PzYHAYh0OB5MQcJA51DjRXC5nb/6ka3k+mKVQKGSOjNQyLWV5W57NZiesByyHJGMGs9msFZ/W63VLUZNW5llh2Ugrw6LwrOVyWavVSr1eT9vt1liOXq+n8XhshcnRaNQAwu/3mwNMpVIGojgNnPhqdZx/USwWtdlsTF8+Go00Ho9VrVZNb81cEZ/PZ/MxAA708be3t7q8vFS1WlUqldLt7a3NgKAAG1uKx4/T3svlst0j68D+0OqU/eQwxeNxDYdDDQYDpdNp05DXajW1Wi3bz7u7O7Oth4cHXV9fW+
[Notebook figure output omitted: this hunk of the patch carries base64-encoded PNG image data from the notebook's matplotlib (v3.2.1) display_data output cells, together with the surrounding JSON scaffolding ("text/plain" figure fallback, "metadata": {"tags": [], "needs_background": "light"}). The binary payload contains no readable content and is elided here.]
099QYGS7Ovuozq5771Kp//o3b7x/3ftvvH/4OVR/xjlV3Hql63tO3x/T3ke9U9TP/df/Yc59a9bc/9WLcz/+jF8d/6j9X/dHfst/2g5520fbVr7w4/tI/tD32XS+7GO/LXrI99/Lv2X5/8XtctH/kDVXPe8vL/P3sj198/9wXVH3312w/f90Xbcf4ghft9/mFn5o7TqI7j1Q98vqqh9+s6qEnVb3htVXHd6pe95qqs9Oqt3hG1dhU/fIvzcc4Pdle8B7dqnryU7d3E3VH7MteUvVvv6/q4z+w6mP+StVb/brGyMbIxsgbwMgbvcMl4lW4vrtTuALUTv95dZ6CSk7JKgbnppN7hU2O5InDk4nm9qtgnhdPfrtS8/jFxMnJyfmxhx9++NIdL7XTfz5kqTYpgOnQShgMYurc+fWxZ7ZNCcGDj04qW0lfXCQwmI6Oji7Nz8SdEl2qrLLaI97cfqxwiEfpyqtSHkBpgSRA8WTgFTX3wwRMbgvOlcbyBJq2s6Yke3JysvcwcrpAJeBRV7Kd/Jg+7UBGG0l2jxHqIy3K3HddJwksOOaMdM6rjhrTfWuNZ8WF+urObtqKwnnXqn+qUFI+2kDtCEg87nzPwDUtoDkG+fc82nR31Bh5VmeMyRd+WtX7fWjVZz2/6l9/d9UXfWfVP/6lqjd7atUbX1e1LFXf+Mrt55M7VX/vv1b9zI9VffIHVX3xd2+P/673r/qYL6r6mv+t6u99QdVf/XPbC6ZP/9qqt3/nqhe9S9Xnf2vVweG2/Tf+/Pbuz3Ofsv3//Letevpbb8f7wW+v+sz/peoDXlD10Z+55fHFX7jl6Xe+X9Wf/z1V//lfVX3Fv6k6PKx63tOqvv6/bcfV38/82PZO0xd/z/b77TdWnR5vPx/frvprf6HqO79q+/3zv3U79jPfdjvX855e9YG3tnKv0R94YdWf/sLtBSu3Zf7Vf1b19r+16iPeaXun6ptevZX7bujPfknVt/xy1bt9YNUXfHvVb/7dVTuMboxsjOSYjZEZI9foRtBzdleBDKWrZyrbX5XpV4660nYlcE4qTN9p5HRlWpWrAZqLe0Epo4ImvaSChneHoEFVdUmGTIHHgNWtb12Ukf+qi19HF4lXXgDpO8c9PT2t4+Pj8/MaT/1YrUrBL55cJ7zwnC08mJC98uMXmAoizuX+weBNiVrjHR8f7+mai410QUkfcR51nElQvi5/cL0kH/ULdcpRta0QKZFTZs6d/FuxpnkUg0nfIgKKdJ5ebSs/ISirrfyR8/HZPl+cpUTHOagLt5Pa+xgO2uQvxZrr3kGZ84gvzxGpOkbZZQsWJNxmlDuBgfNOm1BXXol2OVzPzIO+KGm6O2qMPGcESjmo2my2d2WWperwaHvHhXR4dPnz6cnFhcRmbD8vZ9txNNbBUdUYu7ZHdf4QksbXVrvT4227g8Ntv9OTLV/n4x9u5x1jd0HzW6pe+JuqXvfft2McHm1l+I43bO8Uff3PbMcg36Q//6VV7/v87eePfZ+qZz9c9cpXbOf+x6+q+s5HtnOt0dhUPf/jq777TtX/9OKL4x/7+6te8R+rXvafq576llUf/IwtL3dDm4OqL3xR1Q9+R9XHvW/V+x9V/fxPNkY2RjZGXhMj1+jGthRq8nSF6At/Koy3K6l8BiUD04kKpwMw2MWXAnCz2Zx/TlemXNTPeGCQqE+SnX29CsixOZbm9rtCmpPB4HOzIuDOo/n8KpyO6HPOEltq59scObYHPG3DxOQLA6+sM/ClL+mQd66oF/qnKlf0C5dt5muUnce8CsM/JVFeIOs/bRjf6GX2pf50sc6kRH/VOKwCyT5+8eoynJ6e1p07d871zLus9FG3Lf2OdlV1dFmWOj4+3tOFA4f+z7YbeFtfWBLsE5FnEfWhvp6z+D2Bj8vi8zAx+4KXtnXZmD8SceFNubnIYFsnFjyc91RdnOm1aZ0aI3d91tYjpyf7zxydHl8+X7W7GBpSbNXp6fYiZLPZXjDoQmtZLt/h0fi6IDq/0DrdjrnZYdjYbP/OTqs++0OrPuBoe/Hxih+t+rv/peopT98fd/cbWecXS2M31vnYVXV2tv38v3719mLpnd99e/4j36nqta/ev2Bco+96WdUHHlV99eds20pm8X62m0/fl2Ur997zXMaf9PfXP67qvf5Q1Xs+p+pLv3d7gQlqjGyMbIxcx8g1upEfPnanStU8GcqdhQ7KLXG8kuQCekabzSbe1fH5fTwGgc8xe6BY8zm4eLJxXmb6Y1WBgMzxrhpL8vmFDqs8PiZ1445NuTkmbeLt6cyedJ1Xf4aBidNBfoxx/nyB8yReOE665U57iTcdc1uqD+8IpsBkAiJfBwcHl/Z+n5yc7L34I+lkNi5jibHlNkhjUnbXh+uP4/Di2auBKR7JrydLl8Pl9WPil/lCPuO8ur8nXTnfvMvLmEv8MLEmffP8Grl9vQ99xv3JY5K+Kp7SAoTk/p1yHsdIxYjrAErTZWqMtG1OB4dVDz95e9Fz+ND28+ag6uM/oOo//attm4eeXPU/P6vqW395+/349vYlEL/2Haq+6kcv3qD3/31z1b/6rqoXfFLVC166PfYrr91uDayq+r9/uOqt3/aCqQ9+q6onPaXq2167/f4PfrrqBb+x6iXvW/Uez676rH+4Pf78l2yfjfryl26v7b7g26v+7l/ebil80W+/4LFq+yzVqK0cY2yfq3rH31H1Jd9b9Yl/cLtd8uEnV33tF2z/SA8/eSvPWzyj6kOeWXX7DVXf9vqJBXf07I/abof8ipdWLVX1aX+v6j3+4PbvT7971R9/1y0/3/yarY7PzrbbH5/0lO2Wyqqqd3zX7R27T/gDVf/mn1Z92CdUfeSnXMzxfV9f9YnP2Y4jP7NFdWNkY2Rj5N1h5Jgly+vSZrNZ/DcKZgv3Xfs9hSVnooLYR20cAHyclOSpaFbikkN6WxKNJT7dGZPSZ3rmBRdv0VNPqVokvfCWvcY6ODio4+Pj8wDk1sWZvsj7smwrNrdu3Tq3YeJf8yV7ug04ti52Tk9P9xYQGkMPVHJ8Bpp0pYvJNL7zwkRwcHBw6fkEVm14S1/B7Hu/fcFDW0m22aKIPNIeKVk7CBBsPLn4nvi1BJsq5u4bjAHKk/wyVbdnsUpfoz7djqmAwDFddw5esrNsTL0ILPngtvOdyGM7AYvHr7dNADHTkRNl8/6UjRVXLYycT+qKMcNY8Lsay7LUnTt3Xr4sy7tFBTVdosZI/P+9f6TOPv3vX195v/yqqg95q+1WwH/4c9utcqenVf/+n1W95P23d2M+/euqnvTm8zFe99+rXvAO25dEfOPPVz3trS/uRDVdi57ywneo5TW/2BgJXvi/MXKfb//+IGHk7s5nTDA38sPHnEzkRkkgkhbnVfuVvQRSnG/N+HSutCD3edOdDndmju3zJNDT+QRw7lRyBD/PPpSH7ZOjePWCSccd3UnJJl2sSYfaLuJ8Jid03eliS1UttdFY7tz6LN5TpVdtkz8pcUpuyXB2dnbOQ0r21B/5SJVl8aSLXQYzberVNE+i7kdqc+vWrb
0tQCIuaqh3feYeaOlecrFKyVhy36W+1U5bnVxntLt07L7ofu/ATJ34+O4THI9yczHAhULaWy9/9iQ6W1Q6eZLX+BojASfj323n+uE4s4U6/dR1xDlp++SjfFjZF83pAeqm69MDj5Fj1F1tSn3xu13cofqLv6/q7/zI9q1+n//R2+M//u+rvuqzql78RfMxPuk5VU9+yvbvxe9e9XWvuBsOmqrq+C3euo4eelKdnZzUqFG12dTB4UEtp2d1NrY32pazpbaf6vxu2LJUndZSy1I1NsonSx2fndZmtw1zbEadLrWfFzfy69reZVvk2ztf+uVfOv+dtsbIxsjHE0au0Y3d4UrExa9fOEhIvzA5Ozvb2zuu4/7dF/F+2xD8XQIcd7xUSfQ5OfbMmD62GypdtPjCmufIRxpPi3e2W5alHn744b07X7oAoD54dT/j4ejoaK+6oT6zizy3KZ2PD5WmJCPeHPCdP22t4a/A62KKgc+EIGLAuh5TUvILWwZUWpCkZEx9cuuodOn+xwUQ7XV2tv21e+5jn/HofuK8eGKn7dPCjDISiGh750EvXuEYnuzpgxyD+lA+cH9d45M8+Vy0hSdYXnBTV04pN1Vd+LjLlfRftQ9QKXcRZJKPcWy/0+y8u6yUOeUsju25tu9w3R01RoKH3/OH6/Ru7nA1NQV6s8/90Bo/8G1V1RjZGPn4wsjdC2YeuztcvMIj83RsMsgrRS7mpQC18d87cOE8qScH4yKcC3mBSDJOAi0PYL9Q8MBMDqQ5PdDl3CQFUXI4jUcHVPWBF0bUM+X0oKGt1IcXMR6ctCFtO7PT2t0uHvO53AZjjEuVpvRqWV7YkFgVdp26HBqHeku3ud3/KDO3IaQLTgYz9Zsq3FUXv5PjPPvFKX0mgYbbOtnE+8wqN/Q7b+d6pe7It/TCpEkbaJ6rEh7tpHlUYaSc7ruzPFBVe9tek00om/rNKv4+/lXxoz6+IKa+3c+cr3TM5Z/dcZ39b7p7aoy0O2Ov+tkar/yp2t4RGfh/Lgm+7+6QXPK9pWqM85sq16XtMBhb0+H83rFx8Vl3Xi61n/FwMQ3m3h2yuzfn/Oy+L7XsnwMfF3OPSzJIvj07j/3uY/dtyrfLsOzrbVmWvTHHJRvKV/Z5ujhnPjtpSFzdAAAgAElEQVQ0xs6nNm6ELZ299dvX8pZvs21TjZGNkY8/jFyjG3lpxoyoVAcQ/uddCh3323rpKprz8EJBY/pVcrqNOuN/tsjQWOkWMGXixZSIt2nFj45xTgfJdJ7zcS6/qEiOQL55zh+ATrqZVQeS7H6cNiLPnoiYFBy0tf3Qg9tlUoDRh8SbB5MvGmZJgPpy3mfVECYXfect++Rb1IeSGS/E3LfSRXxK4rqIV5LWnU/GGLd4Und+ke72Sknbn7NT/5ToaD+OI56oPy5eE6jwu/jgnL4VQnN6ccPt4+R+sLbdw/uJD/eX1N/5Snd/XY8+H+1Mvtw3XU9+kdB099QYiZy6a7b53q+rgy9/6d54M3/m6+c9V/tuC+osjSs9Clv8NfqeUzzfaNdHyuuJ/8QLxxbRJpSLdnDckww8no49Gox0fVBennMduz8ljHQbCjvYV3dsfJ43/onPr+Pnfcz2Aq0xsjHyCYaRN3KHi8bQZ09yzhSBhN8lqJyTgZKSofp4QtDYTBhUcqo2etJmAHj1IPXxJOwBR6eb3cXwhOhz6rOSjDsUZRagODC7fLKXfmuDFRN/FauOM/BdDtprs9n/hfIEmnRsPW+lcXQH6+Dg4Pz1rbrwEh/ubwJEvQJez5qdnZ3V8fFxfAU//TElErdf+sy+OpbukqVKSwJq3eF0nhgbBGbniYsrxgSrgBqfF4Aa1++UpoWIfyafDr4p0ZPScQKZxkzx7P3dDh6/7tMpztwePpcvsmaAlkCB43guSosszzMevw7WMwBnrPncvvjwxdF1QaXpMjVGaszd5+VijMbIxshrY+T5dftojIRtGiOfGBh5zxdcdCIX0tvNnKsqK6GqLj2fk8hBIyUG8kEe0tWukys1nedYakuemFzJ9+wCjeO6njQXAZxVEh1jpTHxnOSRE/t8lInPIxB4bt26dX5MbXVediTffozJguMzIboO3acODg72ftOC+jg93f6o8507d86B5uTkZO/3GKjT2SIlJUW/e6l2qUJDGzp5UnFdONgT8D2BOrBQRvHLH1tUdW+NJ313OXnMF4Yua1q4pTFSpWmWWDmv5yFfFCQw93auT+eJfT3uPbY8sc/mpJwpNgnsnFvAT7/1POf6WeNJn7kAEU8plzStU2Mkct7YvjRDozVGNkbeLUbyfGNkY6Tz+abGyLVceWPPcKWXMJBJCpgU7hUCPtOUgpfHfV4mlsQDFU4n8GOszLgD+/yedMnHLGmw/VpwpsTmfK6BUiKXVUHCxK15dUwVM722XbfV9d35EMgRSCknz4knl0P6S/qXjsWj5E7BSJuokndycnJesTw5Oak7d+7s/TiiAIjJl0Agn3V/0Xm1Z1vftuk+SNJxVui4AJn5DROOLywYSwRxzc+3KtGPkk+lxOoxOkv2nvQ8uaVkuAbm5CGdo52Szl3/vuhL8nhcJ/Dx+T0HzeZOcruP6Rgr7DMQ1pwpL6zlN5eh6dFRY+TuvOWCxsjGyLvDyJ18kLMxsjHyiYKRN/ZaeCZeMcLnk0gpWfK5FgXMDDxECZA4/sHBwflryxPAcQw38gxEnK+U9JND08gcz8dIQOug6WNTdiYxtfe3z8z0pf5MYA4Ut27dqsPDw/M/gYnriYn98PCw7ty5c4k/B2yCgN8Wp40cxNk2ASvbeyVC1auzs7PzX3gXyKjCp3Npvz/BI1VxXB8Ebga4y5p8gHHBqiV1QJnJK23r/k2wo4+s+br7iyfHtPCZ2YTHZ8fS/JTRfY/tkz59PtdVauc68aTv89DOnpjZ9io+k1zUc4rjqot9+ZSLdrvqzpXrx3XRdHfUGFlVs+PVGNkYeTVGlvisy77dGNkY+XjHyBv9Ha6q9a0FzpAHFRONqgguiPdxx6VR/PcY2IZtk1OynfefOfgsYFhxmQHkLBGRF5c/ASLnYwLzhOp61W1XndOWAx3XVoiHHnrofGyv/ngSp8x6CPahhx66VEmS/EzIWgykxOYPmXoQidIbvBxs+Btg3ELggKJtFmdnZ+fbLVLyEJ/UMYGAvKTPkieBPs97P/JASvGYQE59WQWVDanrGbAneTy+3O7ed1Ytn4FdWmy4PpJ+1/IJ+VjzL8oyi+kETLNcscYrz/u8XkF0HTNu0gLH7ZYW3lcBXdPdUWNknb+jrjGyMfLRYiSpMbIxcjbvrzZGrtGNXHAx2bqSZobU+RmA06BMWP57SJo/XcHy6jWNq/akZFhWRmaAqM9s6/zMgsS3hjhPybj672DMoNX4Mx37PmdW6Vidu3Xr1t6ec4IqjzEZ+Jzci84Hfse4eLg5gZGDoFfJpIdbt26d+4EnB9etv9FIc2g7SFVdephY2ymOj4/39rhru0VK0OQjJWDNzbePcQzZkqCaApx68+qok/ucgJkPCev/ycnJJd8Ur56gfHGSE
tWsvRPBuOryG5Sq8gP9aRzyPvMJj2l9d1m90pz6Ub60eOD/5IP874sVz1u+gE56mOU5P+75zH1wpr+m61Fj5O5z8J/GyIu5GiMbIxsjn9gYuUY38lp4OnRyFAqbhPbk4JUntvf/TGQyDAHIleAOLxLPKUE7rSUql9mDyGVWvwTIAs8UvOJtrbri7akvgsAY4zyRKnkcHR3VrVu3aoz8ALASC+1F3dPWnkhdp9zrzotW9Vd7ysvjnJd+4GBEXryKd3a2/2OinOfs7GyvmidguX379jmoaG87E5F44K/Yc3uD+5vkSD7FxRR/j4eypQWVEgH7aV7akjpWP+ro+Pj4kh9xESc5Z77ni0DO43HoFWrqiX1myY26cpCmvjgm53agTvP7eB7HKYkn2/I8fwzbc4ODofPtfi4drwENSTKmvOaLvKa7o8bIi2O+MGqMbIy8W4yc+VRjZGPk4wEj12j1gmuM8V5V9RPLsrxypc05My78GuOzY+qrfdFy2LWrZSnZr/ipTCYJ7klOTulgwCtaVshYVVwzmualo3kfjkOj+u90uVOLF972r9oHaNdTAhIBh954pAqeJ6xZgmbSoK38dbdq72DtAOI8Mhl7BUoy6e1B/qN3rgMmXc2vh5PFr4P00dFRPfzww+eVu6Ojo/PfZLl9+3YdHBycV/b0RifazG9zkyduTXD/YEzQfhyXlVX3Ez8mku8sy7byuSzL+XYQB1m29yQoXdFPGYe++GMbLhrpT+TBY5Tj+ALKaZYcuRhhbkn5Rd9nwEke3Z5J7rTYSzJS3zNZCZSzNi6b+vEh85RbZ3ytAdKDSI2Rd4eRazpqjGyMTHaexUtjZGPk4xEj12j1gmtZlu9f7b0jMUMF0+FpaCpAe5bpiDIwf98gCaLxfU7+d2VX1d7rQWcO6AoUqKXbtt6Xi2Y6lR5MFl8cx52WDs+ExwRAHegckwDB1PXmoKGq3dHR0flnJlWN7WDgAMA5uBhIAVxVe9sW6Cd87epMF+JVPxwoPep4Cg7p11/ryt8xUaApQXuSlJ6Oj4/roYceqtu3b59XOh955JF65JFHzh8eVsXLFzySgdUafZccCRh4zv0nLbroU/Jj91F/hoN605uq0j5/T06kxDNtz6TrCVj8JXlTPHsCdbBJAOz+yrji4sVlZdwm8PcFDnnhomwGjL4wJb8OIjN/cPD2XEh/I9+zfOhjXQUoDxo1Rt49RpIaIxsj7wYjz+dZzuqwMfKSvI2Rb3qMXKMb++Hjqsu3zRlEYsaVTedPSdXHqNq/omdgkLwPxyGfLoPGpOPqnCpkybGTk3HOGS8pEdMpCWLUrUDA37KiihITORO/Erg+37p16/zhXyVi2nJZLvZ+a34fi8S91EpunkTUh1VCje8VOR+T/Tk3/cirQjxH4KD/8bv+BIriRwlb9tEvzrOa9/DDD9ft27frkUceqaOjo7p9+3ZM8s6/xw31yURP/0g28TmS32kBJ3n1fIAWINxawfFlUx3ntg0mPn33yhHtQR24nWhr+n2KJ1aHya9/Tj7iAEZ9UY4EIp7X5LvelvZMIKK48wWV95WMXnVz4ONihbaj3Ti2g1xa9FDnHu9N16PGyP2FFL83RjZG3g1GVlVtxsXxxsjGSPV5PGDkGt3Y73ClqhMdjOAi8qvGteTP8xyX39XPiRVBGoPt6cgECgdHtZkZ3oGRnx0UWMWhjC57AmYPau7/5X8lPbXdbDZ1dHR0XqHig7+pGqexmCBSkDuvVbUHDPxOoFR/tlUy97YelFUXFWDy5fMxcaTFjSdlBxbaXa/Ale7u3LlzDsq3bt06r+YdHBzUnTt3arPZnD9I7P4kHjUu/ZWval4DCZeHccjEqTl8EabqJ/3KK+JcQBHQnBe3qdqTZpUjfSeIub3Yh7InXTBG3Z+XZYmvwr5URQ38p/aUg3MxFlJ1VMQ7FSk/uLxa5Ig/+gmfsVDflKdSnlxbiKS81nR9aozcybe5vBgj342RjZGrGCm+dhdBjZGNka7TNzVGrtGNvhbegUPH6IA8l6qlrFypHStUDATe5lRQerCqn4/rRvUgpCPzSjY5gwNTAkCeT9UOtvGKhsZxp3Kd67scTbwdHR3t/R4Iv+tP4EI+uH1A46nSl3RctV/hVGVIx6Vjr76pWqRz1A8TonjyiuUM3NbaMIn7cQcTykpdMAlwO4y2T9y+ffv8oeE3vOEN5/vXZ8k38UcdkOSTs/6zxYeqkQSYk5OTeuihh84BzytbXslzfiS7J2ePlSSH80iZjo+Pzytb3lbEWGPi5HyJlzWATrym5Evy6qwDvM9N+6QEz8q8t2MuoA8LSNynU/U3gTXBibxzYXodUGma04OOkfrF2sbIxshHg5FVFz7VGNkY+XjEyDW6sQsuVo+YgB04eNXvFT+1kyJ521bCiWZJ2s/LCJ7Ak3N45SDNS4cgzy6by6RjdC6vzqSg80qiA4eIlSrKKyfQw726rS89M1hZlaKuND4dlYlWFS+CiD77W400/hijjo+P99qxr+SlvTxpaE7qkHK7PxAgdU7VPx+PoOg2ZsJ76KGHzqt0m83mfDxtk1B1tGqbdLWHnbZye1Pv9KHj4+NzO3qCdD/hednVH2Kl3wgQmDCoR68AM1a4ePHtC5zbY3a2WJSNHnrooagLxonHgx8b42JbB23PZO3ySt+uR+c9ATnn5VjelvPRdxmLnE+8c3GlMUS+3WWWwyivFuIum8d/071TY2TVyWTB1hjZGFl1DYzcudTZ6Wn50rYxsvb6kr/GyDrX45sSI2/0Dhev9KSYFOBilMeobDku99CSZoDgVYbkhHIiT/A678b2cdxZSUm25Oy80p857MyBPXEwOKouAmaMcV6l0z5wJXI5D986RBm575i8+FueqFPaW39eKeJt+ao6f5Uu5fIqkNtEMtJmrDZ4UiKf8k+2l1w8Rh7cPu4r6s8Hk/mKYL6NSW9qOjg4qEceeWQv4LUAYwWKsgiYKIt0y+qMJx2NJbBQbKm9/JFxRz1zXPmF70tPfFB/lEmfU6VNiZ8AR/9kfHtMOfnikbwy1zgfIr/w9UULF8z6rHYEST8mkjx+XHok4ElO9uX81D1Bm7mY9nAgdF+ZLbpnum66Pj3wGDn+f/beZDe2JDvXXNv7nnTytKHMrERONRI0FG4N70PcR9RYo5oUUC9QgxIgCYKABCLjxInTkfS+9ztwfsv/vWjuQUZQlcxMM4Ag6b63Nav5f9vLltkubBeuyxyZOfKxHKlF284cmTnypXDkufJsD1wqUO28KjsCROp7VX7qtCQVRjQSrkkRhi4RRiI5JTytm+sjsem4tA+qmJSx6nc4sxpfKtqp39GmRk2JFOGQROkgEH50nFEWyIPrtG8aGVQZcW0kJa1fjVn1SHvYjPZLo4ARQKPMdUwAJ86qpBXb0bZP6ZoIp9avhAVZs7xvdojqNRoNm8/nfqQwkb5qtWrNZtMWi4XN5/PSOFSGUScRrKNNRPBSoNe/q9VqKV+esSmpKhFotDT6Svxf+xonTtF+ol8qOFPq9XoJBxQ4qZO2NPrK2CL26D1m5f0PmgoT
/ZiidgC5673aprYbcYu2te/avxTAn8KeOMGN96jtqG7pj8pI9aj7Ps71IZfHlcyRx6L1ZI7MHPlYjiQlNTVBzhyZOfIlcOS58mwPXDGCEqP1OkjtuN6jqRHUqXUo8SAkBTnth7YTDUoBJNW3c+CvbWvfo7GqkgE3rRfHicuiKeLV9jX3VMFRDQHwqlSOm3+pW4kqRUz0n35zr0aOdNyAGICq7UQi4HsATcet+kjJV3WppKXRIO5vNBo+HtULfaReBTy1Cz0SF5lrBAkCgVT2+2N6xW63K719vtPp2Hq99rYZ22q18nvjQ5fqX+0X0FIbYlwKZOpbmt7C+KLN7XaHTcCc3sXnqUif/n8uwqXXqHyRJ/0+ZfOq31hnJBX+VhlEf65UKqU9BJGg4+RN+6B9i2SnEWv1e9Wl1hH1G7FC66UP6DxGZjW6egqz4gQsYqjKQvFQ/TXV71yeXjJH7m1fPLw+c2TmyKdwpI4z/p05MnPkn5sjz5VnPaXQ7OESrF6jhsO1p0hdcy3VwPV3dDr6oSDG5zHXPQo2ZTjxu+igKYNUw4jRiugQfKZOl1JsarwKKgAVoAawaWSi0WjYdrtNOjJ9jX3RdAe9VpdplRyQA1GgSChaT8qgta3UZyl7isQYo096bZR/JKH9fl+K9ClRUFQvMZoFqPEOklQf+F7rgoAUOKPtRZ9SO4nEo31X2fF/zBVPTSiiz50C/EhgKluNrMUSsUDbSvU/2kv0+5RvxXIKuLkvtRdG+6HjVr9XXOB/1WVsS20hhX2xr+p3KVlHTI26ibLVjey0Fck5+locay5PK5kjHx6aYfYwop85MnPkOY70sZzQfebIzJF/bo48V571PVwU7ZgOKC7ZUVInvKgwIyhHgcZlypQgYhupfkfF6jWqJL0Pp1GAjH085ZD6fVwu1Xqiw6phE0niZKRms1kCSdom8qNP/Pt9OS0igkqUgxKCghT90QheiiSoX+uO4z43CdHvmGRobngKRHRMKaJSIo591nFwTeyP6pwIEk6/2WxcL0ocjBMCJgqoESicHsKKwI2P6TX0R3WvAKR16ffoN0bcIZ5zUXhkGJfd6Q916D2pSNKpCZy2GScKSlanCELtP/p3JProa3G8sY/6XZxgqg9riVFsla3io441BeSpvkR8iHJRXetESPufwrFT5JzL40vmyHtsCjLJHJk58ikc6bK+t+fMkceSObJcXiJHPuseLhWsDiY6pBpMFNQp0Oa7WKJhxihU6n69Th1Or1GH0s8AcByPa7ScM8j4fySc1FiJZOqpQLr8Xq0eT01S0IukEx1aZRVBCEMn2hSJg/8jyCixMA7dCKykoi9M1P5Eh+R/JR6z48bbVHRN5RoJKtapbUWd6pj4XEF5v98/2Liu0U++L4rCI6vImogdkVVOpFKdR19RIlE9MOZo84C5Tug0LcbsmG+vciyKokRySoIp0kiVVOQuToxSJQWAcXxqf6cmgOd8Su9JAXPsc8Sq1ATvMcAbI77IFb0ouZzDwlPEeOo6jSaaPTymOPabPilhP5ZUckmXzJFmFu7NHJk58ikcqSVzZObIvzSOfNZTCmOJYBNBWYuCjRohA4q5mNEgFZCiQ0WHiwpQZ9DrUtfqdTHFIfZFxxXv5bvdbuckoaQYDVRBng2+RXE8ZUmdK+ZCn3MSDFijeNTDZswI4tTHPdyn90a54kTURdtqE5HwtC6+5++45KvtxchUJBjkHm1Tx2H2MNqhdWvR66NuNYJHPZvN5oGe0KtOUuJkJzq06hyZ6NjVJ7RP5KHTlqbVoJN4DG3KZ1UX0TawvRQYxWvi+KJfqd5SoKnXpEBWVw317yjjVN1KanqP2q2Oh6Lkq3aktqa/qUsjoZEwU22fskWVZYzaUn9c9VCdqH89hkhy+WUlc2TmSErmyJ/nSH1xdiogkDkyc+RL5shn28NldhSggmnsrAoiEocWVRCDUtBTA9YJqtaf+u4UuUWDpO2UkUWDjP2PBnCu6Ds4ThmsEokeYwsYsQE2gkv8UbkrcCpRm5lvDo394xruVTJRIotFbYH/1dFKgBr6q9EOta0IdCqzVOQtBTZ6rxIP7bGn4RyBxML1Gp3DbvU9HvSt0Wj4tavVKum8OiFQANBImtpOKgKp+tUNy5Fw8DPqi9G6KAOdAKUmcyng5v+YDx79TttQ32eDtRKRykr1rd+p30bfT5HYOQBV/ErpiTpSxJCaaOpkIDWOFJ5pmynMUZLWPuq4Yv/0+tiH2OdcHl8yR2aOzBxpLrtfypGxr5TMkZkjXwpHnivPtsKljcUn7ygQiipcjTkSUswtTwG8GnaMpmgfzvX7VEkJW40iVYcaWXySV6VTXypiwbggDQikVqv5MawKdJoyQb/UyWmHeiN4aYQqphGorHFqlYuCpJbUpCKmUmhbSkoRUCMoRXCgDo34poAmRg21n5VKxaNrCqjx2tgX1Tn9gYyIyMQXSOrLEFN2FCNges2pyUzK3uP3tM+xt/pdBEedzGn9kZi1bv0/ji1OJvX+2HaMPKlPpPpyqigh6mQnhU/RLrTPqodURBW705LSTewb90bdnZoAUe85/cexRxKLvq0l6g/9R3/K5Wklc6SZWk/mSPO6tC/cT32ZI4UjrVwyR2aOfGkcGVNftTzbCpd2OEXKUUkKoPyvYEpJPa3GOtW5o4JPAdLP9U/Hpn2ITpAywFN9YIxxDCljUYAmHQLgrdfrpY2mGmEDxFS2+pt7TkXAFDy0Ph1H7KPWic5UVipfrUtJg/tjpEwBKjpJbC/WEZ07OqpGfvhcdR3vTfUx1VeVeayLFBRyxvVYYa7T8cRIXGqJG7nqeKIc1UZVLtFmK5XyBt4oH60zrsZF0FVb1wlAlKlOnlIgqu2nrtGiwJ4aQywRiE/JWH1Cxx/1HH+firZR4oRO/04RU6qeSK6pSeCpsaucU0TGddhXLr+sZI6871NxrCtzZOZI/fwxHIkB6VgyR2aO1L7+uTkyVQ/l2U4p1A6fazAaRzSi+F3KaFIGHgHc7KGD0U4qUnaOrFL9PKXUlMBjH2Pd3Kt1E7FjEyk/kAuRvFSUS3OgI2hoW/q3gkgKwKOjRCdSI9Y6Y11KArFPjFnbTEVqVN46CYkOn5pIRBnHd3CoTLR97UcE6qhnJTON2MX66TsTBI2oxehQBLdIzqnr6VMqFWS73Vqj0bDVavVAr/FI3ChL2o/AQ4QH//q5iSD9iKAYfS0VFTulb/0s2q/26RxGpewzjj3l03FifIpoUxMYtYeU3WqdqX6k+hiviZPilJ+oDHRskUhzeVrJHMl3pyPGmSMzR/4cR1YqDx9mMkdmjnxJHHmOJ5/1gasojnnjsTNcFyNfDAajiYM5N4BUpCcCTvxcS+relPPGPkQDSPVfx6v/p/qW+l2pHF/IqCkS+vd+v/fTe6Ihp0iPelM55GYPX3oYZaT1xLHRXurN2xFwkKemgaico7HznaZZRAdO9TE1huio1BFtLxJraozatvZN89q1LtVHHKeSqUbPYtsq+wMBVUrvMzlnkwoo9FP9NpXuEstj+nbK56MOUv6lk5JIwDH
yd67O+HdsL/riuXFHHErpUeuNfT/Vlzg+lZf6SSqIo/iamkClxhX/TsnslEx+zi5yOV8yR97fmzkyc+Sv4Ejev7UL12nbKvvMkZkjXxJH/uoHrqggyqn0k1M5xbGeuNyZGiT3R5Ki6OlBkShSS+lxHCkBnhN0ivxiZDjKKVWnbvolatdsNq3RaJRIhbFpnnEcp7YJIGs0RyOcmt9OJCY1lpjjG/Wpf3OMqt4byRpgUyJVsABsU7KP9qPH68axpyYvSmQqG+0PfUnl7Mc2YhRQI3nadwUP7uNHZaa/U+RK9Etlrn1OpQ1w7Wq18rZiikSciKSIQ/UU24sAGOukrmhfqXZPRd2iDZ0rp/w29jE15lhPSqeRqM+RnU4gYqQz1Wez40rEKQLQz1L4Eu1WJxqpvp2S52NIJZdyyRx5/NtHXMhnmSMf3Js5Ms2RxX1KYe1et5kjM0dSXgpHniv/LYdm0DE1OgXXmK4QN/zqMni8J2WUpz5LkYaWc2kyKcDSKFkEUD6LjkY78YleFag52tVq1ZrNppNFo9GwdrttjUbDP+M6DIwf6qU9Tqqhz6loJ5+rzHTMOh4F2jiGmBKgslNHiPKOIJ+S7SkAoo+RaJABdqXyRVaAsbahJBl1C7mrvFSmOnZNk6AtHqQgMCYKTARiFDMVdYtAtF6vH7w/JF5/iszobySk1CptlG0KaDVKec5PddKn/p0iDW1P/TjlQ/q/YgV9i+Oi3tRENmWjKstYb5zARZlpH6NstU09vYsxqH70nlP6TMmdopPIVEn55ymyzeXpJXOk+aEHhT3EzMyRmSN/jiOj/WtfMkdmjtR7/lwcea786geuVOOnOsv1+j0bJFNPrwpAKUXp4BSwaQehpwglJWwFFdqJjq1GEiORWrdeF50AcMOZ+b9Wq1mlUilF6jRapySiBKEv4wPY9/t96d0WOp748j76gRypNzoE9akhx6ibjpn7dSleAZvx6jtStI7YfmxP9RMBVUGee1L9jtEmAD/acZxExB/VrwIM+o7EgS5rtZptNhvbbDYlnajelBC4TyO256J7fJ8CCrWZCBQq8xj1SYEt+oyTyBQRpPxDJyrRx/ENHUsEzWgT50okD5WB3puSm5JbSvexnIrK6SRSyVQjeupXkXCj3WtfTk0OI1bFicYpmeoYTq3K5HK6ZI5MR4QzR2aOfCpHuklmjswc+UI58txD17OtcKVAiA6dEnjsfFRmyjD1HhVy7AdCi0AfASr1WcphUvcpmGg/UkqM92iEEkDl75TcogERBVLHihMjBVvq1vY1ehIdMBpcHCsEoNEYro/57wqeAIPqJ+qCvmh6go5DSZ++60lGXBfTOmKkL+pQnZzIGMSdIjb9zRvnmbzEPG6iMNFe6Sd9YyxaN3XSN6I8yEEjp0o+cbKlhKSy10gjsqKgRV8AACAASURBVIqTqdSEiT4qEXN/qv14X7xOQVrTeJSko71pSYFhBM8UaareaeccuaTuPxURVnmkxh3/1nq1zRSGniL1FAaqjFJ/Rzmm2s7l15XMkTIpsXJ7mSMzRz6GIy2BcfyfOTJzJHXFa1L3xLZURqm/oxxTbf9cebYHLrOyYNWx4/f6P0as4KbXqtHH5T6U/XPRuZQjqLGo4UYl6v2pp+JT3ym4qqFE49b2FHD1M/2ce2IEjz6QIqCGgJOz5K9koiSkkREFWa1f/1f5RuKLUTb9n/7TpsovAh3tcZ86CqdM6Sbk2FftJ58R3VTAj7pRGWu6hwJftEn6wwsxVR7USRQvkjvfVatVW61WbtdKHhFQNKKjvqc2F2UY5axyiakTKRtWoDezpGx0khIBPeX/aov6Q6Ffqt9T9gfxxsmPth0nELQRv1ebSGGX6jhlP6dIQGWktnQK3NVHdCyPBXnqUD+KfhEx6hTJPqXNXB6Wv3WO3PGZZY7MHHm0s8dyZHGflLqXh6PMkZkj/1I48tkeuLRDugwYFa9OEU+yicCi9Z57io3RomjEKSXr/ZFIuE7rinXTB5wvOqk6j7YNQMR7kJmOkeV0BUpy1qkbAFOQTNWpOdH6HVEv+mV2jERxn5JufKmbyi2CnIIRMqbvyENBVevTz7REHQHcCti88FIjRuic/qve0BkPOmoz1KERR+0n8lG97vf70lG3OiFYLBYPAFPtv1qtev+V+HViEU/cUsdXkkiBJeOP9oluU8fdRr/U/SR8p31R/W+3W7e51AQt5StqL2pD8ZpzRKHXRp9X/NF+6HhTABvb0TGfwprUNSndY2MUHaeSt+pXfytRnCLdqCfVlX4ffS/aWy6/rGSOLNtu5sjMkU/lSOUz6s4cmTnyJXHkufJsD1zRKGKJSlOhRqEDBPqyQrOHIKV1RvBKOZf2NfYrBVzaTxWwtqvEeWp5UZ9+NYe3KAprNBqlJ2ttKzoQYLBer0ttkKu82Ww8v1plQRSKOgFg2or7AxhfjDCwwfiUw9IeY61UjrnnyEGNmH4hl5Qu1En4nzGpfhWsFIggXNrnvuhwRVE8yK/mfo086jWQiNYRfQDdoyP0EyNyGllEfpCSTjqUZKg35r5H3TJp0DFtt1snXfpFekjKJ1UmGi1EPimSJUrJ53GSF9tR248gGIFPxxgBOgXsGvlKjUv7ovdHAkiRSuxHSp8Rk6hDj1aOEzWK2nOK/FQmp6J7UW76ufqD6kKL9jOXX1YyRx4nnpkjM0f+Eo7UsWaOzBxJeUkcea486x6uKGhKBCX9PBqqgo1+p+kXUUgRzPRJN16nJWWgsZ8p4wEoqVOXrVPkxzXarkbTiJYBrgrC/B+fqCuVimwkPfYRUFHgpi42GGs/tM+kswFiERhTERTVYwRGPtN0FOQS5YuxpjYHR12qPBUkYxRQCU37rdHK3W5nm82mFMXTDcyqwxgZU1tIEQGkjU1qtCiVMkHbZuWN8lo/0dvJZOKkGlMOlFSVyBWUlDQiUUTg0Cikjgc70c3JEUAZN/ae0r3+jmSmkTRtI/pyjAzq30xOTxGG2lVsI+IV10XMUJuMRBzHGb9TsjqFO7HECV2sW3Vwrk+p76JcFet+jlByOV0yRxZWVMo+mjkycyR9fyxHUkfmyMyRsU9a/lwcea48ywMXRkCHYqejA2qnThmFXqeCjoNLKUD7pMarkSXtTzQsBYSYR61OFY0nFSlUQqhWq9Zut0vOZ2bWarUcSGMdSia0ud1uHUT1ZBoATyN15KTztzqE9g9QVb0QVdOIVVy2VTCM5GF2PH0JcNQ+Uq8up3M9Y1KC0OjnarUqAZQ6hAJmjAyqLehYFLg1BSLaa4z4KcnxvcpUr8OeqtXDscakJ8RDMrbbwxvuYzv1et3fC9JqtWy1WrlcVUYxSkW7KT/U3/pzarxqOzp5Ud85Ra4poIwTEb6HzDWyrORN0cg0E6tIAlpv/P8cePN9KkKv8vk5oopYkro22vop8I661b91YqETPo3oYes6aYikltJzxI1cnlYyR95zZPHQjjJHZo58LEdStruH+7AyR2aOpC7Kn4Mjz5VneeBSBUdDjAqIRLHb7dwhzI7Ly+RNU48KJ9YVC4pXJauSlGSiMU
ewiak0EbQUzGL/AGIAfbfb2Wq1cuKo1+t+nC338mb0VqtljUaj9D2gEwGC76JsaEMJCdlEQ1IA1EgR7VWrVW9Dr1eDIyKmEUXq1Hp1qV6X/jlSV0E6RQqRyBQoo45oL44RWWpEilSUmIpBPaQ6qEOrfJAXMqbf9D1eW6vVPFqKDoui8MmFRmDq9brt93v/zsxKaTMKJEqGcYzaP/VDzU9X0FZi17z01G8lTLUJlVskEp0QUBT81BbUv9QO9boIihEoGWe0k5i+orakaQwRc7TfjJ9rFNhpR+1WdRQJKMo3kojKREmLz2J/1J5V/9pOHJPiiUaEc3l6yRxZth/8NXNk5sincOT+/nRL3sOWOTJzpOpL7e3PxZGph0DKszxwqdNFw6CzKQWoc0dhKpFwrYKn1p0yYOrWiJEKMSUY/UyfdOMY+E77pk/KkAH93W63tlqtzOzhQxifmZk1Gg3rdDrWbDZLBsLSOtfh9Jp+xnjpU6PRSEZGVDaqC8al7Sow6Y/2WQ1XCQu5aN5zdHjVF/Vpe6moDEXTQaLs1Qb0PvQfiVivZ+w6GUHG6/W65JzUr+Adoznq+JvNprQhV/tIUfmxygWZcC+yiu0TyYo6UbslNSdFzKmJBO3pA6HqV+UXJxJKrlEPagtRzmZHouS7er1eWgnUvishRpmrPGIUDZnESWoE/Ig5yIo2wRjtk06C+Ju6I+FR9O9ToB3JKZboW1qv6lk/j3gdJ2d6rWJiLo8vmSPDClmlyBxpmSOfypEFr86+bzNzZObIWP7cHKl9jOVZHrjUsU6BU+ppUwWj1+iTdErZDAqgLoryE7YamRpyVN6pp3X9jChTVIwqSI2Yz5fLZemaZrPp4Iwj8+LGer1u7XbbP99ut9br9ew3v/mNfffdd9Zut+3HH3+0f//3f7fFYlEiOGSgYKZEBcCYPdxDEJ1MI02RgKhXgYrf0cCxA42UpCYCati0QxqE2ouCZGoioGPnOyVerUvlxn38DYDxv/Zd88nVbuJnKfvQCYGCkxIi7TSbTf+b/kPK1Ae5YD+LxaKkNyJ9OgZIvdlsPojCqt1qSo1OAoimqw4AeE6uon6NDqtNqP1EffC/7hWgAMpx8qb2HCdHsW2dZFB/TFPR9qgrVU+sP9pUnLRq27FEoNfrtE5KjLohd+2bErrKT3VHWzEyeQ7ncvnlJXPkPY7w2W6fOTJzZKlvj+FIVriq9ymD9D9zZOZIykvmyGd74FJnj8DCAKPDKVGogxFh0Dp1wDFCoYZFG5rnrManT+6RSCipHGk+p02tlxLBoiiKEnmwNByjGoy5Wq3ab3/7W/uHf/gH+8Mf/mD7/d5ub29ttVrZdDq1oigclGJ6idatBkP9qo/o0NE44+QAoEKH0eBjRNCsHGFSwjMzBzt17EhiKZJPRf60jagX7CmOHbBDvxGAAN5Yp/ZBZc7fCjjYIP+nUltixAjZIFNIkToqlYqt12uP5HU6He+LApXqSu09BcpaaFvHovfqJEGjfDH1Zb/f+8liGtlS3ahe9HsFT+Svk1RkoZOAKMMI7IwpgrRGXeMkKeocm9C24qRV+6WpMtQTT5SLslHdqM3q2PRv1YHaoH6+2WySJKbRR/4/RRwR43J5eskceV9fpRyhzhyZOfIpHMl4asI1mSMzR+rY9O8/B0eeK8/ywBUjC/rkbnZMdYgPJHFyyQBZJjZ7CNwp41MQVINKkZBZOZKk4BQdPxJJXP7EgbSvqrho1ACmRpr4+5/+6Z/s7//+763f79uPP/5o//Iv/2I//PCDLRYLjwSqvFSuRXGIBOLARGAiaGj6hjq3Gnw0WnQDweu9Wn+MBjEu+qqOTV9iVDE6VrxG/1Y5KjjpeE9NDk+NFdtdrVYlmcT+UreStYKjjol+Rp9AlpvNxnUMeTD2Tqdj0+nUOp2OdTodu7m5cVtUG2y326UJmOb/0xfVJaCmUb5o6wqk3KfXq5zVVhhzBGUliEhUUW9cx+lO0cdSE03NBVe/0/aV5Ima6+Z89XvaUhtQn009hChB8XnEGu1LnHxFIipNVAOWads6ZnAiNZmIPqj60PqQEW2p/vID1y8rmSPv+3p/aIYV6Y3omSMzR2p7kSN326OsMkdmjnyJHHmuPNux8FrikzIlGqFGycweOr9+HzfxaUkNWBWsf6sxRLA3e/geE75PRQKJuKWAiToU/NkAyj0AVK1Ws6urK/vXf/1X+7d/+ze7ubmxRqNh7XbbWq2WG308zY46iPAAsnqiURyjgp0akkYmNF2FehW4ozHzE50X/aXu0z7xvYJYrAuZK3BrH2KEIqVbnYzo/xR9maVGCrUuPo9AoxMLnbholFUjJKvVylarleuVSI7aq5n5BIHorRJ4tXpIR9ntdn6d6leBV8kiZZ+aFx7ll4qWoQddmtdIo0bzVA6pyZBGmKhXI1unwD1GxtS3VVdqc2bHHPhT+JMiv0i4gL/qIfbp1ORXr4v/077+rf1XncUJt7bD2JmYoyd8W+0jtqP/Rz/N5XnK3zpHmowzc2TmSLNHcuR9SmFFuCVzZOZI7f9L5shnSymMf2tH4/8KIAoC6rx6T8ogSuB9XzQ6pU/R2gfa0TZV+WqoKkSNDmrkTu/jJ0acUCKGx/Uodj6f2z//8z+bmdlgMLBut2tmx/dnEP3RcQPS2p7KVMeu7aosAKKoS34igSuwqmNHMj1lfEo8tKsRIZUJ12jEyaxMempH9EntQyNmGkVkIhDHmwIuBVO1S340OkPbeuIQ7en/6/Xao4TL5dL3HGgkstFo2Hg8tk6n4/cowDQajQd2eMrG6Dt9UJDX9mazmY9Dc8lVzwpkCqbUlYroQlYK9FqfpmJEH1LggzQiwAGUUWdxskI/sX1NL1Abir6iE5nYR5VRtBHGq2Cv/sTf6lcpbNLr9EfxRyN1vNxVJ+JFcXxvDf7AGDQNLEZd1W/i5CuXx5fMkff3hQlT5sjMkU/hSOqvVg4Hk2WOzBz50jjyXHn2Y+EpCmwqoNQ1Kkw+N7OS4er30WhP9SGShD4I6XfRcFLGFIEVwWt0hrxg7o1P6MhDjQtgI92hKArfYKnGFx2bCJ2mRmi/tZ0of61bgfLUpED1RElFy2K0BLmoU+jn6syqG0okFtWZAqL2/1zUmO91NUnJF4KJS9yxnthW3EAdc99Vz+v1uvSzXC5ts9n40cYAY61W8827rVbLms1m6QSoRqNhy+XS+6MTiGq1WkqXiPbPb6JOajtMUrDt1CRBbUHBDmLQFAedjEFumpag0djod4wzRq11MsJ9qh+1pdSEVidDii/UEfvAGGKELdrZOQKOQKyEqv/HvurkIO53SPmD6tDsGI2lPsbB5Ez9SmUY8VgntLk8vWSOvPdvmdCluCdzZPnzzJFljqxV7/e9VTJHUmfmyJfFkXEcWp4tpTAKBUXpdxEUtaMUBSQFCR2wGrTWo4bCdZE01Kli3059pkpSYFIgVIcByNV5tf9q/KokIi0K9lEmGBJH2uomTmRmdgQ4NYAUEOh4iuJ4ek/UoZ7AoyCl5ZSeVObxM3UElY/KPMow2oROXKJTxsiV6
hiZY4Oa5qM6iEQWN5urLNAnsicHfb1ee3rEcrm05XLpefA8bJHqstls7O7uzt8Ps91urd1uO3nQtqbeqN1jI5pnT1ESV9mSz4z+GX8qhYVxa7tRtuq7Uc/6fpcUycVr9LhbbUNtRfUb7T5OWGO6TfR3jWzpxCGOOU4yo99GTFH/UyxRuwboqTtGraMuUhindqykrragMjlFehFT1fdzeXrJHFmxbagjc2TmyKdwJDLabraZIzNHvkiOPFd+9QNXBAqK5peq8+tA1CHjtQo4qchZfDJPCSQVDYh/IygFykhQ8bdZ+cVn9FPrAeB13GqgGkFBXkoa+ps+1et1j9ZplEUNWfuCU6kjaDRJ5brfH5bIiSAyPo1EIS9KypmiIyIjfqucot70vjgOrtX3XdCvFBjQZtS/gor+HfWhfaPvyBxA1igKK5u73c4jJsiYH81Fh2CKovDjbeNDupIChLNcLkuROewhRml0osP/Cra6qoaekS8vmtRTtJAlf2uqDbauNhhlHMen6SE6GYq+q6CpNl2pHN+5o/qLE5oIhhG4I8hGX4o2pEV9XIv6QapoX/lbZak2yXjjmLg2ksKpyaLKGP9Q4tK+Rb1FHeTytJI58shRu+CnmSMzRz6FI2mrWq2WXiOQOTJz5EvhyFNjMnuGB644kJQTpwbM51Ho55YkIxlEwDolZP0+FZ3BERDyKVJTBSsYRkNQQMLZtR7aTCkLEqY/Clw4kDqv9k3HkgJO2mIDJsSieiOKo23r/qJISgqAUX6qN9WXjl0ncCnyiBORaFPaf72P8Wr9sR4FW7WpqHeVMzJR8iQVQtMwNCK72Wxc5kTvIJhK5RiB5XqNqu12x82+/X7f/6d//M0m10ql4n3RsWu6q06ezMyazaan6GjRsauM1ZdUhnod7SmAIRtyp7H3qLNYl04+daKmNpJaeUEWOtml35EoIVKVW7S/OAFRMsGvIkbFFQz9W6PWSq6RUBX8T/lWJI5IcHElJI5P740yjP6Ry9NK5kjZ9yNtIYfMkZkjH8uR2JTqO3Nk5siXxJHnyrOkFKae2mOJAzAr582eW4JXx1KQiMKN9/IZQEe+ry69qgOlhE2b6lQAbbvdLgGJ9kfJJzpBJF6KRkS0H5GYuU+NXr/TfqqxEE0iz5loUlEUnv/MZtaoW50kYPjaLx17lOWpfuqPAmgkB+7Vcev7GtQZVC76uYKS2TE3Wx1eIxmAVZwM6LiIwEW9qby175VKxcmFPpEugQwrlYo1m01/UWO9Xrfr62u/ttls2ng8tv1+b61WyyqV43tHkCOnOwGQjAfQ4loAsNfrlZbmiSbSPkSousJeVe4awWu32zabzVwm+l4Z/KBaLb9PSP0ggrrqj8+Rr6aVaERO9Ug98Tt8WSc3qUml2rnWpRM/tVk9oUnbRx5qp/q3TlSUDCOgp/CUkpp8IVfuUXtGH0r42k+t8+cIJZd0yRy5cb/mPngxc2TmyMdyJOPlZMrMkZkj/5I48tkOzdCS6qwOUKMeMYoVAVYHBgCcAogYRcB4MeTtdmutVssWi0XJodVwzB5GosyOx9Xy2+yQH9xsNq1ardp8Pvf+0abeH0mK3wCYylCXrqNc6TPXKVGpLPSEIYx6u916jjSAAzgoEFBHyhmiY+vYoqPqZmUlUv0sEkkknpT9UH9Mo4myZuzaFg7D5ypPJQTNW1ebVSLBhvlc71Hn16hcvV63brfr6QoKtOgInS6XSz/6mE22amdqY6oztRuivfzoZIH+qF7RG/qFZPBVff8P7wAB9KhvOp36hmZsGZ+KLxnU/ujLKFVvatMKcGrzmrZSFIeoJvYRbVX9UnWfirSp3Wk0W9tCX9gx9aJLtTXGEP1MJ51q32o/MSKLf6rPxIkVnzMGfbkudcb74uT+VL25PL5kjjxgl070daWJzzJHZo48x5G1+kHfjUbDms1m5sjMkS+OI8/x5H/LoRmp/7VjCIBO8pCiS48ogbqiwCLInfoNcKgxc/QjCsQxiSaYWenzWIe+wwNAbjQa7oQ4cLvdtqIobD6fm9kxx1lloBE2BS6VHzJUsMLAopKjA6l8id5plEPbw0EYv5I+8tR+x/FEAuDFeZEYkQ+OHaNtWhd9U9lE8o3OH6MdEfBj1CkVeavVaqW9VXpiDVE7BQm1b50gqUzr9bo1Gg2bz+dWFIW9f//e5vO5LZdL63a71u/3bTQaWVEcoqn9ft9ubm5sv997zvhut7NOp+MrYEVxOLGr0Wi43jTSpnZbFEUpj51ooMpZN5KrjdXrdWu321atVv0lzZXK4WheZLZarazT6bj+Wq3WA9DCPiMxqP+rjejnKlPsU+1d/TliCSCqKRyqe/0sRtV0Ykb7ShqRcOLkWscV7UN9VydO1KM4xTVqayn7i31V+464eqoP/GgaRy6/rmSObNhGeCBzZOZI2n8sR/a6PTMz6/f7NhgOM0dmjvyL4shnOzSDQasjmz08UQml6/dxw5uCVSQR/czMStGFaHBmR6PUZV82vqaAC0Ur2AK+3FOpHHKK2aip5/mrEdO+Rv60vUi89E2NKipXDSxG0VROGIyCn+ZM01eiNSoD2k1FP6if/zVKSV28/0LzkBWgUuNGrkpY2lfapWgUAluJ+eEaRUNufBaji+ieyFmn07FKpWKr1crm87ktFgtbLBaehkDaQ7V62LyrY1U5QkwAGuMkurTf763X69nFxYW/5wNyGA6HHiFut9ulNEMdDxMhonlMXjRaqpFv5IOesFMIiT7GSDnAirz1VEVSPiCdeF8k3liXgjMkx7sy0BmTE+0juuVzvoMYqU/TQVRHXB9JAP1phFTHwHfq5xqFxG7pF3aj6aOMCTkr6VMUD3QDtE7mdEIaCZgftcdIhLEopqusNKqXy+NL5sgjR07v8aZer7svZI7MHPlYjqzV7lcy7m0tc2TmyJfGkazup8qzHJqB4PhfSyrywzXxiVZBSgVGG6oIfitZaf0YeCQhjJj7lMj0aX+/31uj0SjVYVY+jrXZbLpB6XIx0SIFa3LC43i5Vg0jZfRqyDgHAKERGl1aNTu+rZ17WN7WaEUcPwbJ3wAhzhBBkvuRB58rqNN//o4EriQGyGjkgWvVPojEofcYZYFI9TrAv9Vq+Xtd6DM/yGK73TqJTCYTu729fQCK3K+kqvLU1JrVauV28vbtW/vpp5/M7AAYi8XCRqORrVYr1+3333/vE5eiKKzb7Vqr1fL3kiBjxq4TJ9IrmMhwXbPZdDuh74xT7UllTTvL5dL1QmRTo0tc0+l0vD/IX+1GI+Cbzcb7p7aEPKPfRBJivLqZF79TEEVnqt9q9bDHZD6f+//YeQRcJacY4Vfw1zZpTyeiiovaN7V1xSydaCk2aR3q7+hH/4+TNO2zYptONGhLx6U4n8vjS+bII0duZRUGP80cmTlS9XCOI5fLw7Wbe1vJHJk58qVxZOoBjfJsh2YoWJiVBa1PlvoUqYDDwBRQoyCjkjQaQX0IQAdNX/R6M3vwslmMkX7wGYbAGKiTF87p+FNPxEQd1Cl0PEoecbWM/mjUAECGGFQPGqGbz+d+LZ+f
ijKpsdEOuiA6osCtBsiDppnZcrm08XjsdkDECdkALEqggB5kBOCblXNwFYxoW51b9agyYpz1et1arZa1223rdDqeBqoRLu4njx/bBHAqlYqNx2OrVqueDoCdYif0j5OWIBG1oY8fP5qZWbfbtd1u9yClRidReoQuwPTq1Stbr9c2nU6tWq3aaDSyzWbj0Ue1CQXBojik8WBz1WrVN4KbHUCJQzu4F3lCRApi6/XaSSmCtvqXRsV1fBph04leihywYY2SUacGOLA1bKQoCut0OqVTpiJ5qf6RGzhA5A9b4G/kpBO57XZry+XS2u22bTYbWywWJb/ChxTf8C0iumpP2LaSjxKuTvgi7ukeHCUtxRHVWZywakFHSjC5PL5kjmT8D9Mo6WvmyMyRP8eRu/099teP9pI5MnMkun8JHKkPl7E8ywNXHBidjWkM+p2SiD6Rch0ABhCqM5hZMhpmVn7zOu2jTAVxQIMnfzUG+qBP8hpVQ+gYDdEH2k+BvxoqfTazEpEocagyGYMSoT7B63f7/SGSqJEWlvb1iR1g0LaRK+0rKEXC0ze6s6SsZbFY2HQ6LYEwfaF92gV0IJV2u+2nW6FHxogesJ1UREHzx7GHdrvtm2vb7ba1Wq1SxJi6N5uN//D53d2db3KlrtVq5XnaEI+CPvbKd2p7vNBRD8tQoOVvUiQ2m41Hy5B7rVazq6sra7Va9v333z+wAQCKKC/+sVgsPC2CyBr6ZVKAjLlPT4hi2V8JQAENW0O//ADk9FPtR4FKgxb6vcqKvuuEBEJReeqkUXEHkIYkICAlL65XAEZui8WihBm0T906Fl6+ij2aHVMPsBUIRHP2sQFtXwk1Tm6VHNQGtXBNCoeVVGhfr9M6cnl6yRx54MjCyuk3jDVzZObIx3Ak46gUx5P+MkdmjnxJHHmuPMseLp6EGYQSggIonY3RHwVwMystreqTvAovLhtyvxKILhUTRcDRNLpDfjFghzPxgsNouMvl0iMeCsT8z1I19+oStBoHfVZjVrmhfHU2ZKDRO3VYfjM+ltbp3ynni/neyELlrASJE+IsGvlGdrVazS4vLz0SZHY8zlQfTHe7nYPrer22+XzukQzq5ndMPcF+AG8iZnxH/9nMSt45JAAgqI1CRvV63ZbLpZ96hHyIfh1SHJalSRHjjNE6CCraCTLGxtExUT/NY0fPjUbDbm9vbT6f2+9//3tbrVb26dMnlx92hmzpF5E/+o7tEfVjzwUnP2nUB/us1WqlcREtxMb5DFlyH0AY0yaQDTpQIsCHGo2G2xCfq8+rXSoBKfHirzohW6/X1ul0fO+KRuzQoYKyTgz5X6OWy+XSiS/6MBMz+hSxEf3rCgJjRX7IVNMZdDzUF+vWySxkrzgUZae4oxjLdbk8rWSOLKcfMabMkZkjn8qRKt/MkZkjXyJHnivPssKlkTM6hMPEa3SAKFENDEdigCpEBqrLpHxnVt7IpsvBMSqnyuHhqNvt+hIoQEvBsVxo93XzdA5IxOuVmKJh0jd96o4yik/iEZxwDo24cZyt9ol+ahQslWoRjVJlhU7ol/ZDJw/UoXn7/K0bhReLhdex3W5tNpvZcrl02S6XS5vNZh7dQH9m5vcqMSrQAhCQWqvVOpxw1Os5qWgaQnQYiABgxZ4qlYoNBgMbj8cOGrPZrGTrpDLs93tPYVitVrZYLNyRVQiCZAAAIABJREFUlWgWi4W12+2kbDWiggwA/KIobLFY2Pfff2/fffedrVYrm06nLgN0gV1Qd6fTcfkjI3SHTyrY6mSFOpmwkS6ifqd9jZMjdIu9xUkSn+skQ9Mt1DY0oogN6WRT29bC5BS/J7KqMsM38Vvu08mX2fGdNPo+F61DUz6YpMT7mfDSpral/+Pv+GkkInya8fIZeqe+uKKhZKSkyXc6GdFrc3layRy58fb5XtODMkdmjnwUR+6P8s0cmTnyL40jn+WBi8YZQFzCRql0CEXriS86IFUGAyLyENvVJ2jAixJJgT5G4+Y+XkSn4FQUhTtwpXLYUDmfz5109vu956Kqg2jb+kSu0QZkgzOoE0W5ae6zroQogUynU3c0Uhi4l6gEulKDpS410lOREU8NKQrr9Q5HtKrMAWiIlLoqlYqfJoTOkCd1LZdLq1YPuenT6dQuLy9Ly9n0XTe46phwTiVpxqfL2jg1QBLTGdA1aQ1qmzr+SqVil5eXNh6P/R0z2NLbt2/t06dPtt/v/QQniASARAbITNNxACLIiAgzIN1qtWw6ndpkMrGffvrJLi4ubDweu+6Rv0bVFDy73W4JkJgYNJtNnwjRvm6aXq1WLsf5fF5KY4i+hg4UlDUaHaP2EBUTK9WL1qHXcz/fk3qjEWBNHVFf6/V6btdE5Sn4MwSJ/4IFSvgxlQM/QacaZaMv2KuSBH6j0UHqpH3FJiVvJTMlZ3SlkUHFaXSSIhXVDXXqpDKXx5fMkQeOXIttZ47MHPlUjmQP194sc2TmyBfJkefK2Qeuoij+TzP7//b7/c3ZWmTg+nSJc9JBOqwTS1U8AKxgR12AAm2pIWF0GskhMqhAgkBRMv3FAIvicLIMb5eHvMiV3u/3TiLb7dajLoADCqcdBTOu0ydqM3NjikrlPmShMqY+TvXhKNZarWa9Xs+63W5JTjg1YEH0BkfHSPlbl++ZGJB/zTJ7q9Xygx50UgAIaBQDfWhdAKk6Y71et36/746hdtTpdNzWVquVR46InCA/jQqrnjXComQcSRQnwy6J3EEC+tlud0zzAPQhE8A2FX3GBgF+BX1kgT50AkFfIaB2u23L5dK+fv1qq9XKut1uyeGxKyUmBbft9rAJFX3xuUa00J/6FnrWKDZjIpceclL/UBDTyaX6HbJW7FD/Xi6Xrkd8n/6Ylfen6KQFuTJZISKmUTuO2dXIJzJRsEZnihXYB/iGvJjI0S/6gK1Wq+WTTcGi7XbrtsV48CO1UZ240mftu05S4wSffigu4QN6vf7WaGMumSOfypHaP3wtc2TmSOz/5ziSFa7CjoHJzJGZI18SR+pDeixnH7j2+/3/c+57igI/g4hP8xrBUlDEoPXpkM9TTq5P6ziuOgcvmQNcMRZygXGAoij8TeXcz5K22THlARBF6WaHl+5xLU6Fo3MtY9YX4Jkdl2EdQKx8hC9tx6iGgjNOCrDhjNVq1SNqOCv3M2YMEwDQo1HRERE1NTB9+leZahux39gGfSZaUq1Wrd/vm5m5M1arVX+fh26qZILHAy0n/rApk2jEdDp1OdGukhHyJv1BoyEadUN3erpRp9Oxu7s7j9wAbrVazZbLpednA5jb7da+fv1q7Xa79CCFDtVuzMyjhcib04Lm87nbMZFlbAn7I0K1Wq2s3+/7BEOjxfhSvV53G+A7xozM414C+oj8kUG1WvV3iUAW6/XaJpOJVatVt0etB5vTiSZYoSAOkQLGCoike0Ai9DVGvJlAMsnTVCGNxrNBmsnKbDYryQ0fpS4lSPX7aK9K2mZW2hegpI186/W6p87M5/PSJC+FBXwe/VsJX/0Q+cUVKpUf9cTVBcbI/7kcS+bIp3FkrXo
8XZDJTubIzJGP5cj7M1es3WlbvV7PHJk58sVx5LnyLCmFNKq/1ZF5SuR7BX8GqEpqNBq+/KxLiSokvT8qXyM0mrNZrx/eYk6Elr6xkU8NAHCDRIricGwmQMB7FHT5EcLCQQEpfUrXvkfA16d0vVcn44Aiho08W62Wj5PoVrfbtclkYvv93oFN0xlUX/wdCUiJrFI55Gcvl0vPEdf8XRyg2Wy6/NR40Qmy4Rrkj4PSj1Tkq9FoOMkzeQB4eBu9bvDUaCLL4fSZSAl6Rs7b7dZPj2q3D8AOiNFH6uC9H3d3d3Z3d+fHDAM+9AVZMQZNaQHIdSM3/dOIEe9FAXQ0GoVNQFpqR5AofhJ9FN0gL/wVnbOZlYkRezI0aka92OF2e3wnED6jUapT4KTj1/0Mmkah4MYmaLPyBBBg170ATHohRbCFqDGTGWwB/CBqpmlSq9XKZrOZXVxc2HQ6dRCHCNvttmMSMqAPOtFWDNztdn7SFwXbwN6QAdFwMJPvlFiYKMZJa5yg0yeKkrSSiZJMLk8vmSP3ZoI3ZuW0qsyRmSN/jiOrlXus2x3xP3Nk5si/FI58tj1cNKYDwjARHoJAyBgGhqiRLb1PFcB96vwUBq4n9BABiCCp/cY5eOrmWpaLMWpyv7XP1LPZHE9aYqxEtzAOHD5GUABZlA0Amh0jo/pUbmZ+Og+O3u12PQeatonkQCSMVyM6kawxTI2C4VCdTsdlj0wrlUopWhSdlHErUBGhM7PSyTiVSsV6vZ4VReG6QJb0mZxxXeYH8HC4fr9vt7e3DkI6dvqq4ITTUcd8PrfZbOb6RKaqN8h8MBhYvV73lBqie+ie+9AhtqAkiB0hH9JJut2ujUYjvxcZagQH++VUMI2Kkk4BoGALtEU9Zlaya/TfbDZdJnGfmY4JO2GSpbJTm9VoluoFsgRUkUulUinZM3s00JvigmJIJBtsq1ar2Xw+9xOm8F2N0ilGIUf8RAv6pn0mqkQh8QFSlDgJDXIAx7TouGlTo/yKXcheJy460VYdxcm34gD1xiidtqnf64pMLo8vmSPvOVImReBK5sjMkY/lyL3tSzLJHJk58qVx5KkHZbNnOhZenUKNCwOL0QTu0WgcoImQNWKjUS4AiSVT+kCdCJon+c1mY9Pp1J0XJ9f+0B4gpW1rBATgISoAIO12u5LRYZxKnPrkHiN2CrY4Id/r30rKOAfLqwAqkR2NDDGBTxE+MqVPOIPmwQLA9LXb7XrELeZkq4HrGJEFUTUeUGmT+mezmfV6Pc8hRr+QENEhxgbx9Pt9q9frpXeXIE+NXqgetN9m5iQwmUy8Tsit0+nY5eWlffr0yW1LI8+z2cyX9pfLpZMe+tcoZ1EUvnyuG47VjvGboig8UkcEjwiPRkYHg4HbWa/XcxLCZpCLvrARnyMiRRQYe9SNxfid2TESpcSiAIis0S26pO4vX754FFZ9hz4Bumo72AY5+PQLEI82qylY2icmX/pga2ae9jIYDGy3O6aNaA4646FvOtnBJtAbtkDf6AM2rRNLrk/5uhKmjg+f0MkoOlAC0Uk10VMlmUi+SjA6aUYX+YHr6SVz5JEj1X70oTFzZObIR3Gk6CNzZObIl8iR58qvfuDSzsaIWuwQnymYIwgzezDR1KdbnEkFzI8OUu9TINE8bNrHUGgbgGY5kvxvCEiVQL1EQ8ih1qdvjQwoEWhKgBolT+CMV8eohESEjAgEoE6/FbD2+72Nx2OPZOG8tB+JjUgJUZTlcukArhttAVPq02gl7Uc7AVjQA/aCDjDs7XZrvV7PnYt0AH6QmYLB3d2d9Xo9u7i4sNFo5IRP1ET7RJvciwOuViu7ubmx/X5vr169chtqt9s2HA49d5l7ttutjwf7J51Bl6l1gzSkD7BqlAngBDQmk0np3RT0nTz/zWbjG7uxMQhR5Q+IIj/Vo5LEer32Sdd+fzxtq1arlTbLAi6QEIQ3Go2csBhzs9n0l2Ay3maz6cTIOObzeQm4iDqS0qLRvG636/6m9sVkRaN52Dc22uv1rNFolMg/RqyQna4u4G+kfXKvpg8jXyUyxQoFe8WwSCSMVwkEfMC/lDQiISgGM2GBzOLDPX/TXpxoqQ2pbHJ5fMkceeRItTnkkDnyqL/Mkec5snbfn1arZfV6PXNk5sgXx5HnyrOkFDIoLfyv32k0jKdnPsOoI4jyv9apT7AapUERGlUiwqUbePXpmCdu6udJG+PRyI9GKTW/WKNVOLTZMQ9aFafgrUTM//zodTgIy9eAFk5PvrPZMSoBMe52u9I7N8jBVblqX5ARfW+3256LD6DgMLqxEtLVSKDWjX4BUQCX6AvtEpXSiN9ut/P8aupE7gBVv9+32WzmUb3ZbOY6AHiwMyYO6A0i4USpy8tL34tgdgD6i4sLm0wmnpai8gP4yGdfr9cO8vSXomAAkCB3IsKAPcfSxojtcDi0z58/23q9LgE3Y0Q3RVH4ARwa0a7Vav4+EiYNyIRJG2TPJIl6K5WKR7oAoFar5VFFjfSgC8pyubROp+Mv1iSiCNFUKocNutgxEwLsic/UzxVvdBKl8laCIfrOKVFqpxqpJHKJPGhTU58UF9Q2FQ8iCNMOsiFNB9vFHrE7ZMnnTAaq1aqfhgaJKlGAj0o2fKd94fsYmWO88b6Y4pHL40rmyF3Jzor7PmWOzBxJfynnOJL26F/myMyRL40jI85rebY9XGbHU0oAEAAORWv0qt1u23g89vvMjptdNQ2BAanz6xMw96kxxeVEs2MESDfWxadpjSggaL2OsSjIxWVtjaRo9EAjSLQXyRMFasoE42s0Di8nJPLBEjQgjhzpJ+OChKjf7HjiD+PQKAP9Q2ZXV1ceGTQ7rsTgQGzIRLe0qRMC5ASZsNn24uLCZcMyNH1arValt7NjN4yt3+/bfD63yWRivV7PKpWKjUYjz8EmZUCjMxqZAGTMDidZcerN69evfYyME6K7vr629XptX758KUVSyE9fLBY2mUzs6urKJpPJA3tVAtHoFwTNBIUJhEattB8aScImlbCwRc1ZbrfbDvadTscnHESIOG2KfhKVxp65jjETAUWvRMO5logf9sbEw8x8vwRRJcAV3+MaiAs7n81m1mw2S+PmAVWJWH2Y/iiZbjYbrx870OjoKSBl8oJNKS5pdFgnh1yLjmKUDn1DnBGXdCKqEz5wSCOO2g7915SMFEZofdyrtkRdKodcnl4yR+5KE73CHr7HKnNk5sif40jX936XOTJz5F8cRz7bKYVaIuArCABaRBc0yqSRPhRNfXq/ArBGQxgw9/E50SbqIDJhdnQWnoABCRWwkobm8lI3ILnfH17gl8q3VSCLJAN5qvGrYbK83m63fSnd7JheooajIMgSMdEYokrIVI1D5Ug9w+HQWq2WLZdLu7i48AiDkjfOrUel6vdm5s6ufev1etZqtTzSCrjEqCh9Yol8sVh4xIqNv4PBwOVxcXHh+wyQK9+pnWy3W0+lmM1m1m637fr6upTfDumu12v76aeffJNtq9Uq2QppPJwmZWYO9owZ3TMpwLY0GsQkYbfbPZggQKx3d3
d2cXHh0Uo2txJZhayI8lIPkTeIAhm1Wi3fSK7EzY9GDnXTMCCj0W/6ZGa+R4KNynzGBKRer/sLVFU/mkrBhmY2tqMvTgHTF2kS0VKZIldIgGN4OToY38MPdHKHjzHhAJ+Q2XK5LE2gIDXkUq1W/RrGxnHLGlWDbJSQwBT8MmIdOIdNENlW3GPs4JpiIzhDUXJRTIgklsvzlb9VjpzJODNHZo58Kkdik9VKtWT7mSMzR/4lcOSzP3ABAnRGDdLseMrIZDJxgOEJ3uyoUAUmQBPlqFBi1EJza6mfevlec0rNykKczWZWq9U8RcDM/Hp9+iaHmydws2OOOtEZlQPj5h6MhfrpB2M2O4Bwv9+3brfrjgsoABzITqOUlUrFwQwy3O127oCQi0a1aFfrhVT3+73nqJM2gTMAMtSpdUG6elqTygwA0EiaGjbXQTIXFxelDZw8oBDxxSYqlYpviG02mzafz22z2ZTyoYuicB1eXl7au3fv/DuipPv98QWH2B7L7bqcrtGU9Xpt4/HY3rx5Y9PptJSqAPk0Gg0HSnRmZp7moO+5wO41PWW73Tro4lPdbtftHVvQSCWEjA4AZeyn1+vZbDbzNCOO7o1AxkSQ8RRF4XJtt9s2mUysUqnYxcWFjcdj191+f3gxp9qjTqIYCz5yc3PjsqYPRHshGz26GpAnKsd3tFGv120wGNh0OvWJ7GKx8HeM6EoDY2R/A+1jE+iKI4BZtWCcyILJlxadLGMvRL91sso18T01TG4UN7ET7tPoP/0DX5XMUv3SiYROHHJ5vvK3zJEeJa8e32uk/cocmTnyHEdyLPxmu8kcmTnyL44jn+WUQjVeDE8jXxgdDyPkDJsdNj+aWWmpEOPHmHFoBoiBAfK0reQAkPGZ5k5rNBBBA1yVSsU3J3Y6HT/tRqM+ZubLuFzPeOiLRlwUFABRnIZ0AwwS0iHaMBgMShuXATwzK4E9Y9NxEoXY7/c2m81K6Skqa5XRdru1fr/vdXBE6GQysX6/7xuEcTIAltx5QIJoxG6383djYOQaVYJsIAROjELH3G9m9vr1a5vNZnZ7e+vkprbX6/U87QEgV9Lr9XpWrVbt69ev1mg07NWrV55bPRwO7e7uzuUCyRDd0+gypzKpreHsADG52OrYEAl2S0S1Uqn4yUJKohr1rVar/o4LooJv3761yWRS2gBMpFojNvRzuVxav9/3jb/D4dA+fPjgBMqLMrfbrf8NgWl0HGDFDprNpk9ems2mffv2zV+yOZ/Prd/ve+QO39PUHPYo0A8mPuv12gaDQemUsX6/7/pBn6RdqAywLU0NggCq1UNut75PhMicpkLoRBFf1Ykcfowv64QIUlKgR+eKTfzgL7pfQSPp8XrFgkgQMTKIDdEO5KrYQX1q0/qZRqBzeVrJHHnkSH/AupdN5sjMkU/hyHrjfh9eJXNk5siXyZHnyrOscCn4awSF7zBQiMDsmHNtZv7iQRU0BgG4IDAVqBonURSN1ul1umzJNTEygWDZUHp5eVk6AhRlzGYzByGWc5XUaI+IIXu7iDJAhIyXtAEAleiEAiH9g7Q6nY6P2eyYdhHBa7/fe9409at8KJBCu932aCHtt1ot+/r1q7cF2BPBATj7/b5Hb7gfvRExQZ8aLQGguU/lSZ9wHMa22Ww8YsWG50qlYpPJxKbTqTUaDV++3263NhgMbDgc+nJ5s9m0d+/eWVEUTjq8HwPnR3fkL/d6PZfzxcVFybZ3u50fNzwYDDytYbPZ2Js3b+zr16/+rgmigUQH9/u9XV5e2nw+9+gS6R462TAzB83FYmFXV1eenqAbs3VjNL7Efeiz2+1ap9PxyNt6vfbrdZJGdBZdQXLohd/tdtt++uknj/xyelSv13Nb0Qkk5KikSZR3Op16nWaHaF2327Xb21vbbDY2HA5LAQ6irdVq1cbjcSnSxf2dTsfTRDabjZ9Khkzpg5ImslJSUPzAFukLtgHJMQFWAgWD9JQz2tNoqZIKkz+iuIppyE1XSZRMlDRoB3tVvFS8AB90jJT8wPXLSubIVWnVClzJHJk58ikciT/U6rXMkZkj/Z6XxJHnyrM8cNFxBBGX2vRpUzva6XScSBgIub8aXQI49akSgeDQZlZSuEb5lJy4DqWqAjQysVgsHCRZKqe/RVFYt9u18XjsS9eqYIyEfumTL/1hPKQAEN1T2SnQYfCMGYdW0Fcjp47pdOr5uEQjNDLG9ZvNYUMu71igrzjlYrFwB1QSrdfr1uv1bDgcWqVyyMEdDAYeKcEZNUJK1EofLDabjUeMut2u6wonoqBH7OLz58++AXi1WtlwOLR+v2/b7dajnkRDO52O1Wo1u76+9n71ej2Psulxr6RiQPhMRsbjsU0mExsMBnZxceGn50ynU2s2m/ab3/zGBoOBvw+F9IbLy0ur1+s2Ho9LKwXdbtcjhESssE2imZ1O5xDduweGfr9vo9HI9Y98m82mE6xObiBKokf7/d6ur6/t7u7Obm9v7e3btw5U2JjmbhOZ4zNNN8CWibhtNptSHrq+jwa9MTaNSo9GIzM7HiFNwfY0lQAS0pQg9GVmpXQY6mw0Gjafz30fhUZ7F4uFdTodB3P8ER9iwgDh6qRwOp26jPEl7kf/ELLqHR8Ax/ABsAOSZz8JE0fFBp1gayROcVbHgDzBGPxJ69SoXcQi/s7l6SVzZPNBP8B+s8yRmSMfx5FgemHH/V2ZIzNH4gcvgSPPlWdLKeRHB6HRHR0IT42kTTBQM/MlSIyeKJ8CH1Esoiss6ZI/qtEi/iZKQMHgEJw+xRKtoK8YDsY2HA7t5ubGVquVDQYDB3Sz49vBFUC1HwBErVbzjbb6FM+mQUCcfmkOq/Zb+7xer221WnnkjxUTTY/A+HgwxACJAiC3SqXiGzGJXNVqNU8N0fSSXq/nS9p8plFJojXoQN/0XqlUnIxZymZCYWYuK0CNyM7l5aXbGMfVbrdbm8/n9t1337nsybmGGMzMXr165RG0i4sL2+12JdLWaKxGg/Q3gAi4ElECcH/729+6PCAQfc+HHqyCrWPPRHuZYEDYi8XCvn796lHO+Xxul5eXNplMfFyMWfcMcPISBD4ajazVatlkMrHZbGadTsd6vZ798MMP3oe7uzv3ZfUhfBDgZnP2aDTyfmnEfTab+SZtjUgr2QHQtNnv9223O+xhAVir1arXM5lM/H7y6km1MDtE05iAkqrDiVzL5dInELvdzq6vr0uTLXyPseMH+Jvmq6M77AFZ4cM6eQJvNMqmoK6TUY3w4Uv0h2v5XDEXXCXSrZFDMyutQihJMDbGpNgHxtOW4mQujyuZI4Uji/KBH5kjM0c+hSP3O3knV62WOTJz5IvjyHNByWd54FJh0KhG0TRKBkDx9M3SNJEJcm0VUKiP6L+ZuQJxSlUQm/N48tcnV340Egbwo2zAnad1TqsxOyzNslmRDZaMWxXI3xqtA2Ax8nq97qffkKJAfYAw40IOajBsgiR/WI8gnc1mdnNz4xGd8XjsgIA+er2ebbdbj04SOaFtQJyTnOjXdDq1f/zHf7TZbGbfvn1zkiDnG
b2ytEs9lUrFN5qScsBkgo2P/X7fSQPCQ57IgHFDHKQMAAK8CwQAph5+SAd49epV6TvA3uy4hF6tVh3U9LhVolV6vZmVIn77/d4jvJBor9ezyWRi9frxBCI29wKKCmK8fHA6nXqUj3bMDkB8eXnpBEjEikgQk6Fq9ZCTfXt7a71ez/74xz/acDi09+/fO+BjN9vt1qOFTGLoE7ntKb2S30206vb21uvA7ni/TVwV5r0j+OtqtfL0lNlsZm/evPFJAeAK4bbbbfcxcuXREUTZ6/Xs9vbW5dDpdGw0GtnHjx/t6urK04UgTjNz/wfUmdSiE7CCCYYGXbBVfFdJo1qtOqZgP7rhnnaUfChglkbW8X10oRN57FDJTiN8ZuUNxJEsqFNxPpenlcyRx3GbcJlGhzNHZo58DEfW6vd2Wq04T2SOzBz5kjjyXHmWBy6cPTYMKEPS+qRLpARHwTCoB+fmAYbBAp71+vHt0gAMhowgWTqOGyt5qo5LgxiI5ssS4WKJnHs4hQglojja5n4X9L0c9IQb8liXy6XNZjN/O7r2hxPrAHd1bsagUZ7lcml/+tOf7Orqyi4vL83sGK2az+fuXMisUqnYly9fSiczERl6+/atk0y/33cHJ2//7u7ONwJjqM1ms/R+i1rtkKsMAP3d3/2dbbflDafYC/JCv4vFohRVYxJQqVQ8EnN1deV2hK6ZmJATjl2Ro4/M+BuZEpkholqpVOzq6sqjigAjzqv2SanX6x5FAoh2u0PuOkvr1WrVo160v91uXW5EEyGa1WplP/30kwMCJLfdbm00Gtnbt2/t06dP3k+NkCMz/t9ut/b69evS5KAoChuPx7ZcLu3y8tLu7u6s0Tic9sUYaZNIOZOvarXqef1s/oWo+RziJ/JLnri+BBLZ4Z/9ft9TUYhO6uQUnwLYsSXkwjtsAN3JZOIRRvwEQl8sFo4r6Lfb7brtICNkRl4+KRpgFXsbFNDNjies4SPYDBMAnXhyPTavugPz0KMSFbrU1QsmSnp9jNDRXw0Q6XWQPT6jJJTL40rmyCNHru4nu9vdNnNk5sgnc6Tdw89umzkyc+TL5Mhz5Vc/cNGwkoE+vWrHddkNRSkocswsxssRlxxN2W63/ekdwSlZYGAIgUgL/QI8zY4b3Yh4EBFaLpceSbu7u/OIG8YEIGu6BU6sEUQz8yhIp9Oxb9++ORmQx8sLADGg7XZbimjpEiUyJiLDJliictVq1b59+2aj0cgGg4EVRWFXV1dWrR5eRjgcDu3jx49u8Lrc/erVq2Nu9D3JFEXhubxspiQ9oNVq+eZMgLvf7/vSPJE3yPHy8rJE9Eri5N+yoZg3g+OoZubHw0K43NNoNGw8HnvUiWVxbIk9UURM1YnJ1765ubHdbufgtd/vnYRYtiedAYLmJCvNpa7Vap7njnOTqqKRJR6oIEP6ROS6UjmkjSh5spF4Mpk4UVcqFT9NCfC+vb21/X7vpKfL7SqL//qv//Kc7fF47CmMRPCI9jH5wSc1ckYaAhM69gcwydITuvBd7LharZYitkR0e72eH1G8Xq/d/29vb32vyGKxsHq97idt6d4VSP7bt2+2Wq18EmVmvn8AQgdgu92uRws1pYaJRoyAcS+f4ZeMjRQwAFi/o06ic9giaSUatdMoPaSC3OkDk1kz80kt/gXZg8vsJcA++YkYg1/GVQMmJvmB6+klc+SRI/dEie34wJk5MnPk4znyYK/z+dzMLHNk5sgXx5Hnyq9+4KJzGBrRGIRJB5UAACycDSES7UPoRLi4f7PZ+HsLADyeNlEcRszgUZoafrwXoNRIAoogeqb9jcAOILCRlQkzUT+AUZfxIaZv3755rjXLtLyLQSNcECfkx/hIZWg2m/bmzRu7urpyh8Thbm5ubDgc2u9+9zv7+vWrt8N9pG4QVWPTMwWQ10gsMkAObIBlP8Jut7PhcOjXsrTLuNGJkiuGj56U8JCamhEFAAAgAElEQVTHZDLxY1aLorDXr1/7/YDDx48fvS9sDuZdLbPZzCNKm83hPRuz2cwjkXp6DiDDQxAOySSl0WhYt9u1fr9v3759cxv74YcffJLDplciptgX+dH8j33zHe+6+e6772y5XNpoNCodoQx4Q1rdbtffl6H6YTm/2Wza9fW1ffz40Sdvm83GU0bu7u4cwPANIln0TyPv2Dy6Jjee+1arlW8EhjyYpEFImorEBAc7mM1mrkcmc5ycxf9MdCaTiddJ1Lzb7Xo/mcRUKhWXIfbFxFUjxOiaaDbvLdEIOTKlT9g4RKL4qJFSM3MZbDaHzdO3t7fuj2blY635nwk4EzgmpkTUIAfSL7AT6lW806gc1yI7tR3GoisyOrZcHlcyRx45kskQ32eOzBz5FI4s7mXbbrdt83mTOTJzpP//UjjyXPnVD1yankDnUDSD0ygeoM/GvuFw6IJRUmIJmSVFBIlBIAyif0SVdrudb8BsNBqe/81TPvWQt1sUhR8BulwufUmX5clut+skiQNhHCicXHvytAE+HIEUCZxtNBrZq1evrFI55HpzUhHRLJaL2QiJ0ROBqVQqnmML+DKWzeZwMtPV1VUpRQCS/e677+zu7s7TI1ju5ojSRuPwwsEffvjBx7VYLDyKQF5+JACMVSN9AFC323UZ9Ho9P/ocPeOcSk7YDWkatMfkoigOaQ2DwcCPuf348aNHbaifJfDb21uXGZE9dDccDm0+n9vnz59tuVzad999Z9Pp1KOkyIZ+XV5eOhgsl0uPqH39+tUjT4PBwNbrtX3+/Nkdlqji5eWl2yXgBChMp1OXcafTsbdv3/qGVU4LGgwG1ul0PMWBnPI3b97Yx48fHSixjVqt5vZweXlZmmQRLSUyzniwQ7MDULHpWiPzTKR2u50fdYzeP3365Kk0jUajlAZVq9VcTtgfUT6AiwkmZAMYMyHBR3kxJXjB5JCJBRMk3kuzWCxcr91u1/0PHeETRCyJ3seJqaZpgAWk9jBhVZ0uFgvP+dc9GpCfrnIS4dQInK5SKLYic3yQd7vQp1TKjJn5d0z48VWIn++RAeUxpJJLuWSOLHOk2XHl5r+XI5cviiPt13DkfQrhYzlyb39dHLlYLq1WrVqr2fJrM0dmjnyJHHmuPMsDF1EgGtbIneY/6rIjIN5oNHx5GOciFxsyUULSPHT9jFxa3q+AgNgEC9BPp1OPjkEYGkHUSKKZ2fX1tZmZzWYzX1r9/PmzO+DFxYV9+fLFzMzb0dzTWq1WymPudDoO0IyVJ+/FYmHNZrOUQoDRtFot30wKuOIUPGFryg/pDLQ5Ho/txx9/tDdv3vhyOhEJ0khwxu126ycK8XJJjJFxomPN0zU75uCaHaMA5HKzqZX6WOpFTvSd+pDZdDq10Wjk7z+BPEktgRzfvXtn4/HY88s5PQlb+Pjxo0dP6QORL7PDUvTFxYW9evXKRqORp56wL6HT6ZiZ2d3dnRMbJxkBQGxs3e12dnV15ekGrFrhE9gsusP+GXfclDocDu3i4sI+fPjg0VI2tQLy79+/t83meHwudQPe2+3hiN9Pnz7Zer226+trjwxtt1sn
BAURAJ3UESKf+KfZIbo6Go18gzwTlYuLC9d5v9/3d79AADc3N6XIL+k1vV7Pvv/+eyeV8XjsugHkNXrd7XZLRMdLWcEPNoWDV/RxNBo5+WDf+LBGWrFRjYQiF8gREhgMBjYajUpAT6oIbUOERDaZqOAzGk0jjYOJKxNG8JNJuEZUiZ7GfSIaLVZ/I1iitoiMkRsRx/zA9fSSOfLIkUywfvwf/8t+/B//6/9vVeTy31X+j0dc8/r5mqvf27JZ5sjMkS+LI8+VZ9nDpcCuT/Wa60mneWJl+VyfXBuNhk9q2XBaq9V8GVqXDTWvnDaJJpE/Tmk2m25A5G4TqcOwK5WKH9tqdkwLxGn2+70fD8ob12u1mudLIwcepNjwyfhZ/bq5ubHXr1+XJubIZzAYuEFwOhKOzRM/4yK3/f3797bf7+3Lly+eOsHyrhoibzLnwQpwwDh5QEE3RAKIvnG06MXFhRuxpoeQ40w/iXgQ8WI5n5xs+snDBZG/xWLhUR2Wzc0OD5+vXr0yM7PhcGjb7db+8z//0/7whz/4caY40NevX+39+/f25csXz42nDgDy9vbWLi8vrdfrudxIqSCXmZN5bm9vrdlslt43Q858vV63q6sru729dZl8+PDBut2uff361U9ZYowAO3bNwzUpqeyLINKMnfb7ffvy5YtHQoku9no9tzcil0Qq8RezY/48+dlm5hE6XqiJfRME0NRYdMy7ZczMX9aMzb97984+fPhg4/HYjzUGI8yOOdTz+dzli9wgncFgYJvNxv16t9t5O0T4mDhymhttKD7QL1KMmGgia1JuFouFjUYj6/f7nupRFIWnopBKRLSNPQikZvH+NCalBCD4gZQVjME4ItJ8hy+AI6TFaGqDEgB2bWY+WSfqZnZ80aemqDE28Ig6dB8FuKARe/BZ28zlcSVz5JEj59WqFdu1mfHgvrfCivuzEOL+wMNOr33p88LOPvIX5aPg/eoCXdz/WRybO9Qv9fJdqqF9uMZS1xUPqnh4W/wkXlnqve3v5RS7UiT+fnDNfRP7cOWD2AlfpcZo8lnieq/1VJ2pjp265LxIzPZmtUrhwezMkZkjXxJHnivP8uJjOocyeGpliZhB6ESaSTmgr4pCyQiMp1wiEpALRKQrKkVRuMEhEAyBt6qThoCgeQhgky1L+yxvsmkUo7q+vnanQGFm5u926Pf7vsRMagFRhvF4bJ8/f7br62tPEdCjPNnkyak1jJmVsS9fvvjqGSknTPp/+OEHu7q6souLCxuPx7bZbGw0GrlR6xJxt9v1ZWUz84ehzWbjOeS8iLBaPb7xnIesVqtVyhunz/yvhthoNDzlgNN0eNjiCGGW6DH29XrtUQ/ARVfgSGXhfRgXFxc2mUz8IWaxWNjbt2/9oQ3iZ2n86urK02qK4pBbzAZfQJQoGBMMHo6RJxOUP/3pT76hXDdzE2W5vLz0Fb7BYGCfP392uSIr7ifFUFNgsHd0w3WAT6vV8gdm3m1yd3fnttlsNv29OEzg0BMbnC8uLvxkJMaGf5H6o5t1V6uV73XYbrf2u9/9zk9hQreswpL6c3197bn2pK4MBoPSSUTk4ROR40H47u7OZrOZDQYDJy/SebA1bIRABxHQjx8/ur8D/Ho6E34MyFcqFV+R3G63boekfWCDugH98+fPnj6hq5qkUGi6GFFHTelBj6PRyBqNhkc7wQ4IlAd06oDY+F/THcyOpyzpJJ+/dZIPbiuhcK1Gc/MK1y8rmSMP9t/7f/8v+5//+n/bxcVFkiOr1cN7nfb7w4tnm82m3d7e+j4QDo7QqD2Y3ul0rNls2ocPH3yS+fr1a38PWa1Wsx9++MF6vZ5zJPu4wJKf48jRaGSbzcbbBouZENZqh6Pc2XulHEmmymaz8Yl8rVZzXez3e9efmfnq4H6/d45EZ3AT2ESmChkg8NTXr1/t97//vX38+NHTJ7GJ9+/fux3yYmLlyHa7XeJIAo1gxHq9do7/9u2btdtte/Pmja+WYFv8VnzkOHTmGhzIMZ1O7c2bN/b582fPIPn69avbHvZ3s7mx/f3qb+bIzJEviSPPlWd54KpWq346DhNmBWs6RTQMwEYo+oTJ8ikrK7r5U5dviRRSdDkSR8bAAAZyrwEDjIt6cR5ditXVCfo1n8/t1atX9uHDByuKwhVPFEXTMPhRg8CgWXHi4YnlTfLOSb9ATqvVyvOeuQaQIg+41+vZ169fPS/91atXNplMHCAwIsYMWQM6nNxD1IOoKECLQ7FpWCOxSvyQQKfTsfV67SROvnm/37fPnz/7EjiEjrzZmKsTE0Di8vLSIz3NZtMGg4GDFps5v337ZsPh0HOkzcydUceOjnnhMPnQmrMMwavcGDendpkdjhb++vWrR8Qmk4lH1zhRiNQe0hAhOCJT2DLph/QBgm21Wvb582fb7Q4nJiETcrF56aOmKmFf+M5ut/NN6fyPn7VaLX+I1QdmwJAIDuPDpyeTid3e3lqtVrPxeOx10Qc9zUtPDQJ4OSoX/8D2WWHsdDoeBAAgNWIFCBJlNCtveN/tdr6hH19CfnqyG3WhB/pAAWQhS3ACWXENxMbeAqJq+Ab6YfKsuMkPKWjUybX4CrhJUbyjfv7WfQWQoUZmlWwo9BufR8a5PL1kjswcmTkyc2TmyL9+jjwXlHy2B65KpeL55bpBjU4TBZhMJqW9SQDwaDRyIXJSUavVcqcCuBiw5pRj0GYHwOj3+/bjjz/6ZygNwfK0T3uAA3UTTeGpGME2m8f3Z7TbbRsOh54GBxHxRM7qDY6tGxaJKA0GAzMzfxM60QjeFM+SMH0gEsRKmJI0UZSbmxsrisJTRiAYyJuNqDp5Qkc4p9lxkzOkst/vPeJ2e3tr7XbbJpOJrddrfxEj4yLiQV2r1cojTqTTcQoToEakgqNzSV8BiLAxHB5512o1Gw6HNhqNXN/IQwGbCMx+v/fThjiRSeXa7XZ9GZmN4qyyYQfYC85MW6vVyn772996tHcwGPgpQrvdYb/bhw8fzMxsPB47SDPxgeh1/wQTHyJt+/3eBoOBywp9mpmNRiObTCald8MAnOzbIAWgUqnY+/fvPTrJkr4CECt4RPvQt9kxwmVm7ofVatUnC+Sh39zcuI2xKVYnmI1GwyORyIR8bWwI+7+6unI/NDumDKg96yoMGERbpMWiO+QOmdMvndRALGAboE4Uj1UGXUFA7kxUkJniDlE5CAPZQCpMzCDv7XZbSuFiIoCtQ2xcS791AsRYwQx8CdJAZugSTNS0NMaSy9NK5sjMkZkjM0dmjvzr58hz5fyh8Y8sAL+SCEACiCBMOsV1k8nEB6QpEESRUB7fozD+16f88Xjs93C06X6/9xf/cZ8aHsJmmZ80MAVbjB1DWSwWDhyqjFgfho3R8vJKooson2gVKQ0KIkVR2HA4tG63a41Gw6N8ACkRGTU46gRIOBVJIzHk1hKF0ONEOUEJQgTciTBxYhTRq9lsZjc3N7ZeH452ZaOqmTmwEAFjYyrpDwAYoAKJkGoRr9e9cWbHF/0RveI39kXKCOCkZK8v7sOOSI/BkbALdeB
ms+k6nEwm9uXLF7u7u7P/+I//KOU1s5RuZr7HjP0RkCipGaRlYhdEvswOm9jpz253yNe+uroq2Qt2O51OPfKGLJlMEPGt1Wr2+vVre/funb1+/dpJTE9LSoEmxElkVqNCl5eX9urVK+t2u94/NgiDBxz2wsQDPwF4sTF+aO/q6srevn3rQAvRsZeQqBg/SrCk/ZCORDoRkxaNGJKOontbNILOJA05Ine1Q1IQwA7aQKbgDvbJZIw9AuAM0TlkYWY+uaNNbBmfh1zBAn4gMXBYo7b0SVct1LfAPr0+l6eXzJGZIzNHZo7MHPnXz5G6AhbLsxyaQQ40HeDJEzBEMBgRIKeGxJIhudkYHifEoQAA1cz8M+rp9Xp+OAOn62Fol5eXVq0eXuKHQ6FglKgRGV1CBIhbrZZvdmRZXCNzOjYcgXEDPjghhMdeLXJ6WU7HkJTI+JvIHZtFAUYIB+NgCbzRaJSO2SR6imFhbGbmaQq8M0MjWkQlMEaW9TH229tb32CN/NEdkQecU52egy2ItkFgHB5xeXnpkR4di9khWkvUhzFARpBjo9HwqA0TgW63a+v18ZhlPgMQlKSZoBAl4nMAqNFo2O9+9zv78OGDR54B3+FwaEVR+EmIRIEqlUppzx9yIloNSNImEUKi4xH0Go2GpykQ4YPIdQkdO57NZvbHP/7Rc8DxK1KUsCPIgz4Q0ePloNglvjedTv09N+gPPySViDERhaMefZdHURS+Lw/CJ3o5n8/9OjCAqBapFsiIMSADJpfsJcR++v2+VSoV/xwf2mwOp2myURk8u7i48IgxusLeAHeNJuokWiNlTETBsKIoSgfOcB0Yo3gKUeDXkLwWcIeonGKz1gNO0kdwUUmePPpcnlYyR2aOzByZOTJz5N8GR55b5XqWlEI6qZsU6awuX+rAeVrUlAqz4xGf5LAjQOrRZV3uNzN3CI675IWC2+1ho+SrV69KT8YYm5l5vSwxs5zPNfSPtslz/vHHH+3du3fuaDxNkyfKtTg6S59slOT4XQiMI0eRJb9x7qI45MJ3Oh379OmT1ev10pvb37x548v/GiEh2qURN6KJy+XSc8QhQp7aAS5I9u3bt755ErkRrWKDcVEU9u3bN19Ox1DJeeZvctQhLzPzDaeAlpl5+0VR2Hg89nzu7Xbr0TGIisgGmz1JSen3+07kuryuQEAqB5uE6QP2xVI7h4YAeCx712o1f4Hk9fW1/fGPf/R6Npvj5lAicQos2LhGY8lBxg4gP2wTMNWJDxvKd7udR2wZJ3ZmdiB9yOzVq1cOjJy8ZGYexWMJnzQGnXgT0UEekMz19bXbOmPiPkCbCYeSbLV6PFaWiOZwOLR+v+8pKOqvTCABZQ5bMTtG04h46eQDLGJcyIxcdTbWYgO6oR9fI3KKfaMLM/MJh05+8CEiy9Vq1SdipA4xJrU7xsP3iqcUtWXGpVjKeBmDYjF9wYbAclYdsEuIGYLL5Wklc2TmyMyRmSMzR/71c+S5Fa5nSSlU5bCkpsuGOAwdQbFmViIKJRSigTi7CgCBQ1pmx6ghigbYtW8AO9EAje7oUy51Y6goG2AjqsgTOxETPY0QMuHJVzcED4dDj0gRTSLix0sAASKUTR4/wMiYcUo2umrKB44B0RJN4xrdwEiEgzYvLy89kvPp0ycf66dPn5yQkVe1WnUSBvh2u52NRiMriqK0KZYNvKQTMLFQECMCiH53u53d3Nz4Ua46Bk6NAuCwI/QKAOjGUJ0kcMQr0Sp0TYSUNA1stFKpWLfb9Q2sTIbu7u4cGNAL0Y7dbucnb5FCgI0hf7VxtWGIG1lRt066OLHo9evXflIkciDVgckLpDqfz63X65WiVrqsDhi3Wi0/IRMCRN/4V6/Xs7u7O7u5uXE5EGnklC9sGB83M58M0Q+IHxsnAsskqlKp+BjMrORfEDBACHYAonoPIE4UE1JS+eKDRNZ2u13JjnWTvaY/gCvUUa1WHQ/MjtFy9AYW8coC+kz71K+2oZMRcAk71Kickh8+gK/pyoqSExjLmBWnwbhcflnJHJk5MnNk5sjMkX/dHHnugetZVrjMzJ1BB61/6xMi1wNi8emap8xG4/ieIp5cNaoDKKAoAA2Hp/79/nCU5rt37zwKQu5mo9EovTsBobMUznI9dREhoH0iGnd3d94vjfIBMPX64ZhNPTIdGWmajkYjIT+uJ6JCtEojAkSSAFDahxy4j/Hr+xem06kNh0M/2pQlewCXjbE3Nzeed45xATQ6GatUKv6OCRwZ4ACgmHSQikF/2AjNCU8Q4+3trc3nc08LmU6ndnNzY1dXV1YUhX358sWGw6Ht93uP8kHWpAqQWz8cDj2dBJ3x/gkcjnQAoo30w8z8BC3SWiqVikc1GSfpB5A6gAdwmB33E2Dr2C37BZDrarXysbOUzpGxmkM/HA6d0LHXyWTixMFekHfv3tlkMvHIJ+9fw+bxOSYq2JimJpiZ7y9Al7Vaza6urmy5XNp0OrXLy8vS5ItoFptokXelckzBwSdIo0FvCnq6wRwb1f5xdPDHjx99MqqTXXTGZEiJGaCHmLBn/J0JGLbD5ADiVOxiIoC/QPDoSP/WqB7ta1/BUMVRSAL8AAcYh07O6SM2SNGJnP5PFI96ICadDOTytJI5MnNk5sjMkZkj/7o5Mq6uafnV7ImA9OmOJ0dvRJ6G9SlTB25m7nwsaTI4JSF+a1SQ+iECQIoX36HwzWZjvV7PnYe8Y5bKAQLGMp/PPbpAlA0hm5lHFiBG+o1xkHLAE7+Z+d+Ax3K59NOSttutDQYDX24lFYE66TObUpENxLTf7+39+/f+Fm+ioBAchILMAfJarebvNgAYiuJ4EgxLyj/99JODCdEpImY4Ju+ighiKovD3Rrx7985Go5HnzXN6FdcDBgA3dU+nU3/xHqkWRBcARMgIWZKWQB9JEYGEiWrtdjtPCYA89vu9HyOMfFQXkCPXb7dbPyUKWyWCQ/oJ7wFBVwAr48Zpa7WaAxnRzs3m8MJsPZkJm6DNm5sbq9frdn19bRcXF3Z3d+d2vFgs/D0n9BsQg1QBDPSoPsZRvUTI9F58Bt2hGyJcRXHMzVZwZIJCW+iR77EN7SOTRY2yKxgrqHc6HZcBmAE4YlcKxJoigX4pCsykURFBw59pB33wPfZB9BD5Uw8TK7ALf2bywSQbW+c76ke2+/2+dEQuY0a+TESRE31hfPymbl1pURLJD1xPL5kjM0dmjswcmTnyb4Mjzz1w/eqUQl2WY1B8ztKrCp8OoggGygDY7MkJNCgEgOEJVkEdZ+Rlb0QNOGWFVAEcuigKf98FS5n8EHnA8ZQkJ5OJRx4gCU4xioLWZU7eeaAbhVmSNjM/tYZlY31yJoJodozo0a/4RA4wcTINhomzcRoPBs2LBXmBHqc7EXWMusXgITImEcgDIICUcAYlMIipKAqPIk6nU79HAZgc61rtkPv95s0bJ9SLiwtr/m/23qVHjiy5/jweHu/3OzKZmSSrit0FqSFo0asRtNH30qIH0KeYb6PVbCRADQitRlU1ySIzM94e73eE+yxCP8sbVI
n/ooqaRdEdKDSbzIxwv9fMjvm5x8wyGVunSqVie1Aul0377Dod0gjsinvkiJrAhWQCMJJknZxgcABFvt/t5sT9M+8D+YXrkNgGdQRcy+XS2j+7jDUMM98LA91sNi/kQfwuWv9arWascS6Xs8GmnuepXq+r3W5fdHtKJpOWQOVyORv6jAyD5AJ/df/zfV+j0UiSrCgYX8PnSeBYi81mo9VqZYmk53nWJelwOM+mcY/u2U8AhUQBAHH9jrbV7ukB8geXgef5iSP4Fc/IQFV8kT0FMNlv7AltOp2fWDc3ccD38VHsjfoHEjr3VMSNC+wDa+qCiasldyUk7ve6yTgggk3x3R/GWtdu4+vnXzFGxhgZY2SMkTFGfhkY6Z6G/Rcs+G//5Wde7uK6b62uBIKf4ecIQqfT6WIjOIJ29dM4HobD57AoOCIbgjESVK6urpROPw3R4/K8cwFlvV63N35+jqNqfs7zPBtYx/dhaLA6bIYbvIrF4kWAd1k/mCNaoPJ5HFmzee5xMGuGxIHZGLyRFwoFjUYjTadT+z2OPwGTVqtlIE+ggn1y5SUEkfV6rfl8riiKNJ1OdTqdFASB+v2+zZyIoshYRQIFQfrh4cGmuyOdoAsQjgBLBeBjxABZMplUp9NRIpGw1rzsF7+TTqdN8gCjBBBJT7MWPmSZYYNg/Ha7nSUagC2spPTE8hDosXH+LplMWlH6arWytZ9OpyZ/cFmdMAztWfAH7s9lU9lz1ot7wuaZVzOfzzWbzWzgZRiGarVaFrTwR1cWAfuGfwHEJFs8C4kTdRb8O+sYhqF9Fv4Jo03CgR0TnEkoCFhIIQA1hlmSFJHsuOwUsiXiiCu9QLvvMunED9htfJa4RDzBRmhxG0XRhf7f9S+3IJjPgDF1k0/3HlxGk9/F7tlzwBL/cGMnv4PvbTYbAwSS7w/Z/zAMLxIY1svV4WN/bvLjMtvx9WlXjJExRsYYGWNkjJFfBkZ+7PosbeGlpzkc7pv2hzfDW6LLTOGk7sPBlOFw7pu0718Wz7GxBIkPNaEMH9xsNrq/v1ej0ZD0NA0cfbD0xETCKqIBhkGgow3Huq7BuiCZzWbVaDTkeZ51ROKeMXwCCnMvZrOZMRtuMGUNYQ3ZaNaK+8Zpfd9Xt9uVJOvWBAjC0FFwCWAcj0f7WUAXJ16v16rValoulwqCQJ73NBfG8zx7NhyL+8AODoeDGo2GMbhMup9MJhfdkaIosrkZyElWq5WWy6UFEeyG4EJXo+12q3K5bGC+XC5VLpcVRdEFw0gg3Gw2tu9ukTB/T/thpBbT6dQcE6kN+wnrRHAAPBKJhNrttjk6n5tIJAxs2HtJNuHd930r8KZOIpFImK5eOjN8FLA+PDyYbyWTSU0mEy0WC6VS54L28XisWq12kZDAADOHh3vn3gAWN4jAygJk+/3+Yqij5507ZLH3FEDTsprPYbBrrVazmAEozmYzkznk83k1Gg1LNPD9w+GgarVqSRd25spTACliDLbEcwKyJBcwXMQZpA0uwwhAAZ7UgnBvDGKVZDUGsNbEB5JKPp97AARI8mDk3ToU/Nz1excgeG6SW/fkxJVTYZ8/ddIAuLksnctqkgzH16ddMUbGGBljZIyRMUbGGPlZuhR+CBIfsqEYMpvPTfPvrsNjeFEUmbGh/YUdco/x+H0+bzKZ6Hg82hu3JHuj5WgeJoAWnSwabFCxWNRqtTJWjyJUghGFf2EYWoEtgRDmyPefZmjApGEkBMvj8aj1eq3dbmeda9BuM5wwkUioXC6bRhhj4+ifgJdOpzUajZROp9VoNFQsFi/2BgbjdDpdHA/zjI1GwxhGZBmr1Uqed571EASBMTnZbFblclnPnj2ztrMEHOyAQNlut9VqtYwRxcBLpZIxpVdXV2YDLjvl6uOlc9CqVCpmE659kWTg3NgG7B7ORuBjP5LJ8/BIgjd2mEwm9fj4aEGR4OoWomI3/B6SBhgeEoQoigwg5vO5BVWO1nkOipDT6acBlkhCCOYAXyKR0GKxMA04SQrdq+7v7y+OuFutlv0OmvtCoWCF7Mgr8BcGpAIgJFTu+khSs9k0trZSqVgwm8/nZm/YhZtwAX5IP/CHTCZj68Jk+Z9im9g7WDtiDiBK0jGZTNTtdjWbzYx9Ph6Pms1m9mckKOv12mIYfybYA6y0UfY8T9Vq1ZLM6+trNZtN1Wo1kySNx2NLNlzmFFum3sCNge6JB8/kBnr2ivbEblx1mUtiJXUx+Nj9/scAACAASURBVAmxz0362Qv3hQAf4vc/BKD4+rQrxsgYI2OMjDEyxsgvGyN/8QuX53n/RbfpHrURXHjzc49/MTYeCMPj2BRGA8Pg3/gcFoTP52f7/b6xgsyNyOfzNmeE4C7JJtTDlHEUXyqVTDM+mUwURZFtfjJ57kzT6XRUrVZtLWjLSVCUZEEc+QTfwRs1nVY42s9ms6rVahaQMDqKawFDPuN0OqlSqVhb2jAMTedOy16cC4dFD3w6naxlLJ2X+BnYMpKC2WymZrOpZDKpdrutZDKpZrOpr7/+2oAW9qNQKKjZbJochc8EYHe783yOcrms8Xis8XhsNuGyTLAb0+nU2BKCOkzb4XDQixcvTLsNC7Ldbi+O2kks3I5HrryDble5XM6AkI5OJC8UqLrH8slkUsVi0eyGgaTYEI7KsEDXQVer1UWHKDchw2ZYA7doGkZxvV4bsMAmY4d8TyKRsFk2o9HI9gGWCtnMdrs1sIZRJNEB2Pv9vhaLhbU93u/3Gg6HGgwGtjfYKcGRPYIdlJ6KVKMosmGQsHHMeNntdjbQkfWma9RqtTIbJ7mSnlhsWDISQ3yfBJKfo66DZJS6jXw+r0KhoHK5rEKhYHp5N5nyfd+Y7uPxqJubGxUKBV1fXysMw4uZLcQcN2YSL7E/6akdLnbMxc9++Lv4UbFYNCkOiS/Pst/vtVwujcX7ULaGbyB5cWUR7qkL/x9QjK+ff8UYGWNkjJExRsYY+WVg5Mdeuj4begIMrrGyQWwWLAz/jl4ZQ3b1r8woIPDzsCwmD4VT8O+pVMo69mBoMB7H49GYuQ9BhcBdrVatc1MulzMWz/M8jcdjc3IMlGN/JoYjqfjwaBag4PjbZRtcJtNla4rFohkNGw8bQIEnAZx7Oh6P1r4XJgdD47gZFkuSFUDDRqbTaXU6HXMqWtx2Oh3VajX96U9/UrVaVb1e1+FwMJbpdDpZQC0WizoejzbwEm02Gtp8Pm9OfX19rVwup8lkYgwaz8nRPS1Ms9msVquVKpWKAYXneWZjbqJxOBysPgBAkZ66J7ka+XK5bAEpl8tZUS17CZMDW0NNBTbFs7gyFkCVBIBhh2iMsV0SEDcoEkhoq+oWlbZaLc1mM9Nx83zoo/G/fD6vyWRiazAej61QfLPZ2IwcmPHj8aharWYyCGocoihSqVRSNpu1ZAoZy83NjQV86Zy4tVot05673bhcfyDQucyQy0ifTieTl5DUZDIZ6xIGsMP6FQoFY3P5XPYI6Us+n9fhcLBif5fJhCVl7Uk2+E4YUt/3rbMbQTwMQ/X7fd3f31s9A/UB6
2hwGdcNHONnl5eXjggQfg8XhQUVEBh8PRZRatUCgQDoexbt06KJVKLFq0CE6nEx9//DEOHTqEeDyOhoYGvPfee9i7dy9MJhPKysowZcoUHDhwACtXrkRlZSUuvfRSzJ49G+vXr0d5eTkeeughaLVaPPfcc8jLy8P8+fOxfPlyfP3114jFYigtLcWNN96IxsZGvPzyy0hJSYHf70dqaipSUlLwxRdfoKqqClqtFi+++CIeeeQRjB07tsuBfgBw5MgRvPfee/jhhx+QlpaG6667DmPGjMHOnTuxbNkyNDc34/LLL8eoUaOwdu1aVFVVQZI6d3JqaF7AP9kAABNwSURBVGjA1VdfjVtvvVXsLMUw5yqJRAKVlZX429/+hiNHjiAzMxNvvfUWzGYzJk+ejMWLF0Oj0WD48OF48skn0d7ejgcffLDLzkBApxfryJEjeOWVV1BfX4/09HS8/fbbMBgMyM3NRUNDAwKBAOrq6rB27Vrcf//9KCsrE/krci9WQ0MDnnvuOZSXl8PpdOLTTz/FAw88AKVSiRdffFHsVvb8888jKysLw4YNw8aNG7FmzRqsXr0aEydOhMPhwK5du/D4448jLS0NDocDr7zyCiwWC6ZPn37KQR556Pfu3Yv7778f6enpyMnJwR//+EeYzWZcf/312Lp1Kz777DMcPHgQDz/8MHJzcwF0bsd84MABOBwOZGRkQKVSYfXq1Xj00Udx9dVXIxAIYNGiRcjKysLYsWMRDoexadMmLF68GKWlpbjqqqug1+vhdrtRUVGB1NRUpKWlQaFQoL6+Hlu3bsXf//53KBQKzJkzB5IkYceOHXjqqacwevRo7Nu3DzU1NRg+fDgcDgcPaH9CWCNZI5kLC9bI3uuFNbIrAzrhotCAjo4OJCUlYdiwYSgvL0dzc/NJMZOBQAC1tbViq0UAuPfee2GxWBCNRvHaa6/hzTffxIQJE7Bjxw5UV1fDbDbj448/xrfffovMzEy8//77YivUFStWYPz48dDr9fjwww/x5z//GV9++SUee+wxWCwWSJKEL7/8EhkZGXA4HFi6dCnUajVGjBiBCRMmiK07SRDl8enyuNBwOIwXXngBn332GSZMmIBvvvkGNTU1ePjhh/Hqq69i3759yM7OxjvvvIO9e/fiu+++E95G2uqyvLwcZWVlSE1NHcjqZ5gBQ95fy8vLceDAAcybNw/XXXcdfv3rX+Orr75CTk4Ojh49it///ve45ZZbsHHjRnzwwQd48MEHTzJO5AE8ePAgHnroIVxxxRVYtGgRvv32W0yaNAm/+93voFar8dZbb2HLli246qqreo2fbmxsxI4dO3DbbbehuLgY//d//4evvvpKbK38+OOPIx6P44477sA333yD7Oxs1NTUYNeuXQiFQmI72E8++QSBQACvvvoqYrEYFixYgPXr12Pq1KldDvLsCVqB+Pjjj3H8+HF89NFHyMzMxH/+8x8sX74cU6dORX19PX744Qd4PJ4uoQlNTU14++23MW7cONxwww2QJAnvvPMOnE4nXn/9dTQ2NmLs2LFYtWoVLr/8ctTU1ODdd99Fc3NzF69dRUUF3nnnHUyZMgVTp06FRqOB3+9HZWUl9u7di8LCQvHZZcuWwWQy4f7778cXX3whVjSYnxbWSNZI5sKANZI1sr8oTv2RH1Hof+NWw+FwF7GQJ/nRAWIAREVv2rQJW7ZsQSgUwpIlS1BYWIiXXnoJDz/8MIYNG4a9e/di165dmDJlCp599lnk5uZi7dq1GDp0KAYPHoxVq1bho48+QkpKCqZPn45///vfcLvdSEtLg9PphCRJ2LlzJxQKBYxGIyZPnowXXngB8+fPx5w5c3D33XcjJycH+fn5eOqpp1BUVNTFw0YiuHbtWpSWluKll17Cr371K6SlpWHbtm345ptvMGnSJPzhD3/AkCFDUF5ejkAggGnTpqGgoACXXHIJMjMzxYnl7LljznUSiQTcbjcUCgXS0tJgtVoxYsQIVFRUiMNRN2/ejCVLliAYDKKsrAzRaBThcLjLTzAYRFNTE7RaLbKzs+F0OpGdnY3Kyko0NzeLM0jWrl2LjIwM/PznP0c4HEYkEulSTiQSgcvlwiOPPILp06eL2G+DwQC3242MjAwkJycjNzcX6enp2L59O9RqNebNm4fx48eLs2nIm0ifpeeqrq5GS0uLePZTcfDgQdjtdmRnZ0Or1aK0tBSVlZVob2/HrFmzMGXKlJNyVJKTk1FWVoaRI0eKc2oqKiowYsQIEX41cuRIbN26FX6/HytWrMC+ffuQlZUlwrsSiQTS0tJQVlaG/Px84eHMy8vDwoULMWjQIHFOSCQSwebNmxEMBvH6669jy5YtSE9PFwegMj89rJGskcyFAWtk37BGnmBAV7iATsEIhULYuXMntm7dCpvNBpfLBYVCAZ/Ph/r6emg0Guh0OgwdOhT79u3DDz/8gKSkJLz77ru45pprxKF81Hg0Gg1aWlqQkZEhKo0OYgwGg3A6nSgqKsKKFSvg8/lw4403wmazicMMr7rqKmg0GvzjH/9Aa2srEokEjEYjxo0bhzFjxoilT6PRCJVKJRp2bW0tUlNTcejQIVgsFgwdOlQ8J3UYt9uNmpoa0SjoYDqaqSsUCthsNnHwnPxQQYY51+nuQVOpVHC5XOJw1OTkZOzatQs+nw9erxdjx47FsmXLsGfPHnGIICWxHjt2TJzGbjAYYLPZ0NbWhkAgAKVSiZqaGuzZswfTp0+HRqPBX//6V3GiPGE0GjF37lzcfvvt2LdvH95//304HA5MnDgRH374odhNyWKxwGazoba2VhwOSSfLd3+uRCIBrVYLu92OI0eOwOfzweVynbJeAIh7I2/eoEGDRO5Kampql1APusZqtWLGjBmiLBJEQq/Xw+VyYfv27di2bRtWrlyJm2++GVVVVcJDCQBpaWlIT08X11GeDdkiefm1tbVQqVQYPXo0du/ejX/+85+YNm0aCgsLOaTwJ4Y1kjWSuXBgjey9XgDWSDkDvmlGJBLBtm3bUFdXB4/Hg3nz5kGhUCAQCGDVqlUoLy8H0Hk42HXXXYevv/4aixcvhl6vh8lkwi9/+UtoNBrceuuteOONN3DffffB4/HAYrFg1KhRqKiowJo1a7Br1y7U1tbihhtuwODBgzF58mQsXboUwWAQ11xzDRQKBSZOnIhly5Zh8+bNMBgMCAQCKCoqQiKRQDgcPsmgu1wuZGdn49tvv8XixYtRXFyMGTNm4Mknn0Rubi5ee+01GI1GlJWVYcWKFbj33ntx+PBhZGdnY/LkyaiqqsKGDRtw6NAhtLa2orS0FNu3b0ckEkEoFBJejHA4LBo1w5zrdDdM9fX1SEtLw+7du1FRUYGnnnoKOTk5qKiowLvvvos777wTLpdLJLMCgFqthtvtBnDCC97S0gKbzSaS47du3Qqfz4dJkyZBrVbD4XDAaDSeZGiVSiUOHDiAP/3pT2hpacGDDz6I/Px8ISQA0NraipaWFmRlZYmdj+TPQRsTkKiEQiE0NTXBbrcLr9bpGFgqg4SqqqoKDocDDoej12
s6OjoQCoWgUqmEMMh3c6NQsry8PCxduhQVFRWYMGECqqurEY1GsWnTJlx55ZXQ6XSIRCJCQHpDpVLBZDKhoKAAd955JzZt2oT169dj586dQkyYnwbWSNZI5sKDNbJ3WCNl3zMgpfwXm80mdjxJT0/Hz372M0ybNg27du3CxIkTRYhAPB5HLBZDSUkJnnvuOaxcuRJutxvPPPMMJk+eDKVSid/85jewWCzYvXs3CgoKMH36dIwZMwY2mw0pKSmorq7GjBkzMH36dNjtdowdOxazZ89GNBrFFVdcAUmSMHXqVLz00ktYv349otEoHnvsMUyZMgUejwe33347CgoKusziHQ4H7rrrLtjtdhw/fhzDhw+H2WxGQUEBMjMzkUgkoNFosGjRImRmZmLv3r2YOnUqZs+ejdGjR8Nut2PVqlWorq7G7NmzUVxcjOzsbAwZMgQulwsdHR3w+/1obm4Wu7wwzLmMJElIT0+HSqXCsWPH4Pf7sX//fgwaNAiBQACtra3IyclBcXExtFot9uzZg4kTJ2L8+PFdyonH4/jyyy+xYcMGHD16FIMHD0Z1dTUyMzNhs9kQDAaxfv16pKamYsSIEdDpdJg7d26P20V7vV689tpr8Pl8eOKJJ1BSUoJ4PI7MzExs27YNLS0taGlpQUNDA2bOnAm1Wi086eRNUyqVGD16NHbs2IGjR48iFoth//79KC4uht1uP+36KSoqwvr161FZWYnBgwdjy5YtyM7ORmpqqkiKlm9/LUkS6urq8Je//AXFxcWYOXMmdDodRo4ciT179iAUCqG5uRnl5eW4++67YTabUVRUhOrqajQ0NECSJBw4cAAlJSU4cuQIlixZgokTJwoBpjqi7yWPXlFREXbv3o1oNIpQKAQAYutxXt366WCNZI1kLixYI/uGNfIEAzLhopsZOnQoXnnlFSiVSuGN0+v1GDt2LPLz8xGLxcTWlQaDAQ6HAzNnzkRJSQkikQhSU1OFkc3IyMA999wjtpW12WxQKpUoLCxEdnY2gsEgLBYLTCYTJEmCzWbDokWLAAB2ux2SJMFsNuOmm27ChAkTEIvFYLPZYDQaYTAYcN999510noBCoUBJSQny8vIQiURgt9uh0Wjw6KOPigRhSZKQm5uLhQsXwuPxQKfTweFwQKlUYtSoURgyZAja29thsVig1+uRlZUFjUaDjo4O4THo6OgQDZYHO8y5iHyL2csuuwwFBQV44403sHr1auzevRu//e1vMXToUHz00Ud49NFHUVBQgN27d2Pu3LnQ6XQnhQAkEgkUFhaioKAAr7/+OpYvX466ujpce+21sNvtOHDgAL7//nuMHz8eKSkpwkbIxYT6zoYNG/DJJ58gPz8f69atw7/+9S+UlpZiwoQJ2Lx5M55++ml4vV7o9XoxsEwkEggGgwgEAgA6wxxmzZqFpUuXYt68eXC5XAiFQpgyZYrIl+krpIn67ezZs/HBBx9g/vz5GDZsGKqrq7Fw4UKo1Wp0dHSIe5Yf+kgx+g6HQ+TszJ8/H7fddhvuuOMOhMNh6HQ6zJgxAzk5Obj++uvR0NCA1tZWSJKEG264AVarFR6PB9nZ2UhOTj7pIMlAICBya+ia1atX484778Tx48eRk5ODkpKSLs/CnD1YI1kjmQsL1kjWyP6ifPrpp8+ogDfeeOPpBQsWQJIkqFQqWCwWJCUlwWg0ilmzRqOBxWKB1WoVP2azWcwwzWYzrFar2BaXfrRarSiLXqxCoYBOp4PZbD7ppHv5YXOESqWC0WiExWIR5atUKuh0OqjV6pNO/qY49aSkJHH4m8FgELszEVqtFmazGUajUcSo0s5NdG9KpRJarVbE42u1Wuj1ehgMBt7uljkvkCQJRqMR2dnZkKTOc3SmT5+OmTNnIi8vD5dccgnC4TBCoRCuvfZaLFy4UByG2v3HZDIhNzdXxHLPmjUL11xzDZKTk+F2u+H3+zF79mzk5OR06Zfy/hmPx1FRUQGlUgmXy4VIJIJIJIL09HSUlpYiKytLnD80b948XH755VCpVFAqlfB4PLBarZg2bZqwCcOHD4fH40FSUhJuvvlmsftS97j83rBarSgsLERTUxOUSiV+8Ytf4MYbbxSeNL/fD6PRiJKSEqSkpECSJBgMBgwfPhzZ/00iTiQSGDRoEPLy8lBfX4+kpCTcc889uPLKK2EwGGC1WqHT6VBbW4v09HRcffXV0Ov1MJvNGDFiBAYNGiTCJeiejx8/jksvvRSlpaUAgMzMTGRnZ6OxsRG5ubm45557MGLEiC7283R55plnGp5++uk3Tr8VXdywRrJGMhcurJF9c7Fp5JtvvokFCxY802NbOdPYxKKiosT27dsB9LxjiXxGebr0dk1fv+/+/adbRvdKPNW99vfeTlUWw5zLyA8LDAaDiEQi0Ol04oT7WCyGQCAgElUNBkOPBorKoST+jo6OLuV0dHQgEAjAYDAIQywvQ963gsGgOFGeoLKo/Hg8DpPJBJVKJQaB7e3tCIVCsFqtIpQgFovB5/MJb6F8gHq6dROPx+H1esX22/RMAMSmBnq9XsTJ0zXdBSsajcLn8wkBJ1Gj+vf5fACApKQk8UzycuR15fV6xQoGEQ6HRfI1DbpPVzTlKBSK7xOJRFG/LrqIYY3s+/enKothzmVYI09dNxeTRo4ZMwbfffddjxcM6ISLYZgLD7nRlO8sRn+TJ+LKt7XuqRz5T/fP9mQYT1UGIQ8XoCTk7saS7r/7IZHdv7c/xpXuo7c6oL91/50cuu+e6kZ+DV0nr/vu5RAUmtZd0Hsrvz/whKt/sEYyzIUNa2TfdXMxaWRfE64B3xaeYZgLCzJ23eO1yRDJt6Q9VTny/56q/L7K6ckInup+5NfRd0qS1OXzP2YCcqoyequ3nn4n/1tP9XSqcnr6Tvmz9lU+wzAM039YI099P6yRPOFiGOY06M3w9NcgnWk5p/pcX3/vjzE+Xc7kfvrz2TOtZ55cMQzDnD1YIwf+fvrz2fNBI/lkQYZhGIZhGIZhmLPEGedwSZLUBKBqYG6HYRiGOcfJSiQSKf/rmzhfYI1kGIa5aOhVH894wsUwDMMwDMMwDMP0DIcUMgzDMAzDMAzDnCV4wsUwDMMwDMMwDHOW4AkXwzAMwzAMwzDMWYInXAzDMAzDMAzDMGcJnnAxDMMwDMMwDMOcJXjCxTAMwzAMwzAMc5Y44wmXJEmXD8SN/K+QJKlEkqTU//V9/BgkSbr8PL73KyVJSv5f38ePRZKksedx3f+/87zux5+vdQ+c//XP9I/zWSNZH/93nM8aeT7rI3D+2+jzWSPP97rvCz6Hi2EYhmEYhmEY5izBIYUMwzAMwzAMwzBnCZ5wMQzDMAzDMAzDnCV4wsUwDMMwDMMwDHOW4AkXwzAMwzAMwzDMWYInXAzDMAzDMAzDMGeJ/w9aiLv/t+87XAAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA1wAAAFDCAYAAAAu+g+jAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOy9ebht21nW+Y3dnHNvSEsSQqNRkBIsKEQEAS1ERR+KVAC1RCwCCFEioKKGLtIEiCARpC2waKURKjRGKXrFQvQBCRRYYBAsH4qQgIQQEiMhhHt2M+uPtd69f+vd75h7n3P2DbnnfN/znLPXmnM0X/+O+c0x5xrLslRTU1NTU1NTU1NTU1PT9dPe7zQDTU1NTU1NTU1NTU1N9yr1BVdTU1NTU1NTU1NTU9PDRH3B1dTU1NTU1NTU1NTU9DBRX3A1NTU1NTU1NTU1NTU9TNQXXE1NTU1NTU1NTU1NTQ8T9QVXU1NTU1NTU1NTU1PTw0R9wdXU1NTU1NTU1NTU1PQwUV9wNb1R0xjjF8cYrx9j/Cb+veUdjvURY4wfvmb+PmKMcbLl6zfGGD89xnj6JX0eO8b44jHGy7b9/r/t9ydtz//iGOPXxhhvgj5/dYzxQ/i+jDFePMbYw7HPHmN8/XXK19TU1NT0xkFjjA8ZY/zEFjdePsb4vjHG/3gN4379GOOzr4nHS8fa4tfrtnL8lzHGF44x9i/pM5V9jPGZ2zH/ItofbI/9XvC1jDH+CNq87Rijf4y26Q1CfcHV9Eig91+W5dH49yu/E0yMMQ4mp350WZZHV9Xjq+ofVdW3jDEePxnjRlX9X1X1DlX1P1XVY6vqPavqVVX1R9B0v6r+1iUsvWVV/aUrC9DU1NTU9IikMcazq+qLq+rvV9VTquqptcGbD/yd5Osu6A9ucfO9q+qDq+qZs4ZXlP3VVfVZl1y4vbqqruXCsqnpdqkvuJoecTTGeMIY47vHGK8cY/zX7effhfMfMcb4hTHGa8cYLxljPGOM8Qeq6iuq6j23FbLXbNveHGP8w+3dpleMMb5ijPHg9tyfGGP88hjjk8cYv1pVX7fG17Isp1X1T6rqTarqv5s0+/DagMWfW5blZ5dlOV2W5deWZfl7y7J8L9p9flV9wuzCbUufVxuAmV0INjU1NTU9wmmM8biqel5V/fVlWf7ZsiyvW5blaFmW71qW5RO3bW5ud0r8yvbfF48xbm7PCcs+frt74uVjjI/cnntWVT2jqj5pi43ftT3+lmOMF25x9iVjjI/bHn/T7Vjvv/3+6DHGz48xPnw21hoty/LzVfUjVfXOdyr7lr6/qm5V1YeuTPcNVfVOY4z3voyvpqbrpr7ganok0l5tLn5+T20uXl5fVV9WVbXdhvelVfV+y7I8pqr+aFX91LIsP1dVH13bu1HLsuhC5vlV9ftrk+zftqreqqqei7nevKredDvXs9aY2lbWPrKqjqrqpZNmf7qqvn9Zlt+8RMafqKofqqpPWGnzz6rqN6rqIy4Zq6mpqanpkUvvWVUPVNU/X2nzqVX1HrXBsj9Ymx0Tn4bzb15Vj6sNxv2VqvryMcYTlmX5qqr65qr6vC02vv92q/p3VdVPb9u/T1X97THG+y7L8ura3I366jHGm1XVF9UGY78xjXWZYGOMt6+q96qqn78L2auqlqr69Kr6jDHG4aTNb9XmLtnnXMZXU9N1U19wNT0S6DvGGK/Z/vuOZVletSzLC5dl+a1lWV5bm+TJitVpVb3jGOPBZVlevizLf0yDjjFGbS6i/s6yLK/ejvX3a3eb3mlVfcayLA8ty/L6CX/vsb1j9ttV9Q+r6kOXZfm1SdsnVtXLryj3c6vqb44xnjw5L4D59O1Wxaampqame4+eWFW/vizL8UqbZ1TV87Y7Jl5ZVZ9VVR+G80fb80fb3RS/WVVvNxnr3arqycuyPG9ZllvLsvxCVX11bbFxWZZ/WVXfXpvt8U+rqr92BzL9+zHG66rq52pTXPxHk3ZXkb22fH1nVb2yqv7qSrOvrKqnjjHe7/bYbWq6O+oLrqZHAv3ZZVkev/33Z8cYjxpjfOUY46VjjN+oqn9bVY8fY+wvy/K62uwH/+iqevkY43u2FbRET66qR1XVT+qCrjbbEniB88plWX77Ev5etL1j9oSq+s7aVOtqjPHUgZd9bNu+qqre4ipCL8vyM1X13VX1nJU231tVv1x3BnhNTU1NTW/89KqqetIl28ffsnZ3Vrx0e+xsDLto+a2qevRkrN9TVW+JQudrqupTavP8lOirquodq+rrl2V51RXlIL3Ldv4Prqp3r81W/Nq+DEO4+Yy6muykT6vN3b4H0sllWR6qqr+3/dfU9AajvuBqeiTSx9emMvfuy7I8tqr++Pb4qKpaluVfLMvyZ2pzYfOfalOZq9rcESL9em22I74DLuget32QtyZ9prTdJvgxVfVhY4w/tCzLy/iyj22zf1VV7zvwBsJL6DOq6qNqs61jRp9aGzB81FV5bWpqamp6xNCPVtVDVfVnV9r8Sm0ulERP3R67CjnO/VJVvQS4+PhlWR6zLMvTqs62z39VVX1jVX3sGONtV8aaT7qhb6uNfM/dHns/4OY319Vk55g/UJvtiR+70uzravOSqz9/VV6bmu6W+oKr6ZFIj6nNhdJrxhhvWpuLkqqqGmM8ZYzxgdsLmodqs23idHv6FVX1u7T9bvuSi6+uqi/a7kWvMcZbjTHe904Z2+5v/5rafQ6M9E9qA2YvHGO8/Rhjb4zxxDHGp4wxnhbG+/mq+taq+riVOX+oqn6mqv7ynfLd1NTU1PTGScuy/LfaYMqXjzG0y+NwjPF+Y4zP2zZ7QVV92hjjyWPzEyPPrapvuuIUr6iqt8H3H6+q125fGPXgGGN/jPGOY4x3257/lNpcWD2zNi94+sZx/nZAH+sq9Pyq+qgxxpv7iSvK7vSpVfVJs8m2d/o+o6o++Tb5bGq6Y+oLrqZHIn1xVT1YmztUL6rNNkDRXlU9uzaVvVfX5tmuj9me+8Gq+o9V9atjjF/fHvvk2lTDXrTdnvivar6v/Xb4e9oY4538xHY7w5+uzZ23H6jNSy9+vKqeVFU/NhnvebXdbrFCn1abl3s0NTU1Nd1jtCzLF9QG2z6tNs8p/VJV/Y2q+o5tk8+uzcuW/kNVvbiq/n1d/RXoX1tV/z2ekz6pqqfX5gUcL6kN1n5NVT1ujPGHt3x8+LbdP6jNxddz0lhXlO3FtXk04BMn5y+T3dv/SG1wdY1eUFd/nrqp6a5pLEv/5ltTU1NTU1NTU1NTU9PDQX2Hq6mpqampqampqamp6WGivuBqampqampqampqamp6mKgvuJqampqampqampqamh4m6guupqampqampqampqamh4mu+kNyUxpjLPb97P
OyLGff08s5vO3a+bXxxhgXvrMNz8/4uR1eZuP6HN428e3yzc41PbxE32q6XlJsePxel77X4s6Pr/UnrcXzLG6TbJflh8Rryk1r+e2y8a8i64zvCf36sixPXmvQdE6NkY2R9wI1Rj581Bh5b2HksixxgitdcI0x3mNZlhfNzu/v7+/8dUb29vbq5OTkQr+9vfMbbHK2MPfZ+fT59PR0tQ0deW9vb+e7jy+eTk9Pi8S5xLPzsLe3twoy7pAci3yyDed2fk5OTs76jTHOeHb+xxhn+ldbzpEc1XWqcdWWvHMs6oD653icI/GleU5PT3d0QBkpp/5yDPej5Fve/vT0tPb393fm2d/fP/vsflRVF+TiOerj5OTkbOy9vb06Pj6+oMvkz5RFvLis7pMuo8j76Rhj1u2a4ob6cJCY2cHPeQy6r1M2nyvFp8s+ixnnVTY5Pj6uZVnOvqcxE58zPaW+LvP+/n6dnJzs+Dvbuq+n3DiztwP3LOc5bylveR6tqjo+Pn7pBWbuY7oMH6saIxsjGyOpA9dHY2Rj5L2CkcfHxxf4OBsnMXk7tLe3t8ghnTkyUnXROSS49zk4OLgAPjMw4Rg+H3g8S04MvOTEyYF8XhnXEw3l59xMLB5A3scTHw19FT5d17NA5bjJLuyrwFMA+JgEMrenbCkbpISV9OZA4u2UoKkzAsxa8CSd8+/+/v5ZcpF8Pp77sAcwSYl0WZa6ceNGHR0dRaD0xYH0R1tStlnyoJ9qHPcfjuk6pq7dpvQVjX9yclIHBwd1fHy8k1BddsYOvytB7e3t7Sw8leg9yeuc653y08fEwyxBc8Gk9vpO27iOPQ/ouPyHx2hrzuE+MLOj2jD+Up5wHXEc8k2ZqJME9Jx7+/cnl2V512q6EjVGNkY2RjZGNkbW2fF7GSO3PMaAupYLroODg51J3ZlmAtNoqb2CWYKkMaqysXjcq0RrMqfqEeed8UpnpVFmAe9zMslqHFZ2ZomW4/IvHcZlmAGzJ18mVAFCAhLnz5O5JzUnT1LOL/U9xrgQrF7NSaDmcztIK0i9AqM2rOTN+HZ9uy6VLJU8UyKnvr0/E9hlfsw+GodgwsrirDqUxlLfqotAwcUH7eQANItf1yNtQ13oL4/7uA6OySdnCX8tbjkux6DtU2ylOJFuaHMHRY9N54HtuQCfVXRJKe41drKZjvUF1+1RY+T5+cbIc3kaI8/baKzGyMbIRzpGbuMkXnDd9TNcZKpq90qZiVfn2N4V7QLRkDMg8NunDHDOsZbIdZ7OlozgRmSweJLjX+fBAWvmNDP9iRKPKXHrc3LoqnOg8MSnc+5gnMvt5pUu8k2g80THv05sR3t7YNKWngwSoHhC8WqgyBcKvtDgOFy4OIAk/6c8XtlyHaUqoldh6DM+BkmVNk+I9M0EXJ5oUpynqpaIAJBiWhVeXzA4UXfJf9aSr+ZJY3q/2bhegZ7Jc5U8MMtNass84zkx8c1jvpiazZPkcznW+Gxap8bIxsjGyMZI10dj5L2HkWt0LRdcSRnJ0AwcKtkTur67U3tSSnP7lT558AQgx/VxkqNRybM+dCZ99wSh9ry1nBKv6zAFh8bnLdRU5WSScHlSALgD+pzkU98FSDrHROoyuGMq+Tq5LGOMnUStPkx6HC9VMCk//60lL+oiPWeR9MKx3T7kl8CUAJZjz6qWngBmiwxf6HiFxvmm3RKI0qfdH9y/Ul/Guujg4OBsO0kCS6c1wKTvcsHktvCYnQE5x1yTl3xRT/IzbTtYizuCuqq9qiDrvNpyzzj58/hdozXf85zadPvUGNkY2RjZGOl863tj5P2Bkdd2wZWS0ywZUlkpOLUnlZVAju9KmlWlyI8CcQYYHkwcKzkMkyfl0nypEsN5JVPa2+9yOhDxPPf2+nxMMjPZ3JmdF9f7Wv+0QPCHX5kQGRzuwGvBmWzt/KXFjSccBmZV7Ty463JoDvkrj9OXfMGiY2lRdBml4E4y61xK9L5oolxcGKkNYzeBTlp80IazpOR24dj0UybGGcgS8AhWszzgfbnQY9tUsSKIehy5/9K2KZnLf5K/+0I5ycLqXapgM7d5jCbdez/PN66PNVBvWqfGyHO5GiMbIzmnjjVGNkbeCxi5RtdywTW7belBQ6E88blR0xzqQydYczhXUkpM5E2UDCGSTLzVPEv2HHctwfucpFR98lu1/jnp1G3jlIDP+9DOTDqcY81R1Z8BKqf1MTwZpoWeB57LnGRIbaqynikndbCmR78F7kHPyh3jhDpgkvfk7eOQPAZm/u6+SPm9n/up62c2ZkrULpPGTQAw45M0S3YEmJQL/E1xLjvtlvjV3GvnRQlkeVzkb/tK+ZTjU2/pPH3K8+5l5LLpmMZJFwBN69QY2RjJeUmNkY2RM5k0bmPkIwsjp/0uHfmK5AZISZIK57+qDDwJVDi2j8ME5ELzvPPtbTiHeHNnmDk62/pYNL7z44Gfqlr6p0pFSv6zAEs6ZxsfJyVh1xflmNnzMn273n1+Vk4ceBwAZ4Gc+OfCRJQSp9tztu0k9aONkj7dR1w+3/4gnTpoUGeeYHyBwc/UXYoX13/aUuFAqzaSn4la592HZmDievGElhKr68ftIXl50eB9vZI2i9kEjtLD/v7+hfyVbJMAw7+7H7nNmS8SmHsspRhOPHARd9WKc9OcGiMvtm2MbIxsjGyMnMl6r2HkXV9wUfn6rluCblCdXwMaGs0F8GSsY+6kKaGv8a+/7hTpuxzFHcINQX4YYN42AeBlCcJ1xnEkewJn/04n9OOeoKlzB+5EDjRecU06Sf5APXDuFDQaj/q4ik+kxOg8uO1cB66HtSpfsnXiMSVt5z+18WTreqT+PaE5yNLmHCPxPauy+nYN5yXpOyVMB5IZAPrcslGyv8c/27g9xhgXYl9/WdX3yp/LnT579Tj5jeeKmSyXxVKKaW/r+khtm65GjZGNkYkaIxsjeb4xci4j+Zz1I49vrBh5bXe4nBFndmZICc4ASz+ulgAozZNuI6uNK5LnaCC2938OdDNnSYGVEqTasNqT5vd+LrPG8LlS8rwsaXvCoTy0l8afjbmW9GbbARI4OV9rSZ102ZaDNJ5XLNw3ZuA7O0Z9efUrLR54Xsc8Get3R1JlzPuTqHeO71s8Eg+zMdO5GVhq/tlCR3/5zEaK9wT+1HUCmSSXj8eE7n1crpRgqd9ZxW1WdXfbrOW7JIPLTD2zjXLrbMHhlEC/6c6pMbIx0ucWNUY2RjZG3tsYeW0XXJxsFriJydTfyQOZiXeWLBkkCQwSf86TO5cnwjVZUiXGyUGA48+cxXWx5vA+T+LZ5Sf/azJS1pQ82S9VitLWGq/kkR9/kJK686BlsKZb2ldJQm43X2wkANFCxu3nt7KTvb2PA13inTqjHlMyFX/pOB9yTnJ6+8Qrq2O0rW89oA5cvpn+UwXO25CPGd9+bKYrPz5r4xW3NQBIwOM2cd9O/ZM+Z7E506cWKPSzNXv756Y7o8bIxsjGyMbIxsj7FyOv5aUZoqtWzkSz4PFbj8nRZkDE9inZ+nFXZlLumsM5iLqR19qmZERDe
zXOgXnGF/v45xRsyRGpS+eX8vEckyn1riTlP1rn89D2Tp600vYF5zMBhstOn/Xb0AQ+l5UyuN+neXjc28+SB/slflKssb/rmck2AQb1n+TVea9i+SJgFp++v5zzaFx9dh0kfXrcphhLslMe92OO61VN9zmXOS2aUpJO8UufcN0lvXEsl8d5IaV8ugacMz9rujNqjGyMbIxsjGyMvD8x8q4vuNwAniBnbeU8SfCUyD0xJPKA83k9WFLic34ZuDKCQM6rMF5dIF8MQL/qJ+8ug+/jJh/u6LPb3pSFeqfunQhslwWp/nqCYgCnW9Ep6TvPM0c/PT09e+jSk3/ih3pxH5Ueks+wsnUVH/SkM8Y4e4WzA2aqQCY/GmOc/TbFWtLRmD5G8jvala9u5Vhug7Tooj2cJ/bn+CTy4fqfASt1SOJDvvTtlLwpj8dA8p3EV6qG+mevzjnfkl/HZWf6j/NGO6Rc4218scKcwfNp3FlubLo6NUY2RupvY2RjZGPk7uf7CSOv9S2FZFhMXuVWfXI0T0z86/OINBd/hCzdlnfn9vmTESkLA9Tl4w/4uXOdnp5e+AHDpAc6uTuCV5huh9RvxiMdOtlDCTEl4xnY8Dznrbr4hiHy41tdPIkmJ2cSmN3Kph5IvsggjwmYk4+kYGRwU/7LwJRg7H48m3eWsDWvg5fzshaDPifHmNmDAJF05nrm5zV/m+mCn9cSvcuY/CHJnEDRF28+psf0TE9M8LQ7z1+mb6ekv5k+kj+zXfLDptujxsg6m7sxsjGyMbIx8l7EyLX57voOFydLyf2yRMPPVJBvHXDHdKIyfBxVejhX4snHSD/c6HPOZHY+ZqDm8lblvduz5KVjDgCpLXlmAnFH0fxenVT/NJdXoBQQrED6g6wC1mU5/xV7zrmmb352nXNLBeWnjj1w1V5t/AcemQxnAJoSi59z8HawdB9mu1SNYkxI3z6Px4CqddK59O/6Ij/Oq+amfb1i6PwlWzoopUqa+EmJ32PJ84svxhzUEgC7vqkPtyeJOpzlNo8/b0seyL8DiCd59wMtXmaLrssWU2kB5fptuho1RjZGNkY2RtIOjZH3Lkau0bXc4ZIwqjbpH4WW44lB3qqjorxixe8USEktOWm6/effZ8oRn+4USjBejSBv7qx0oL2981flkj//y4TGc0nnnqAODg4uyMz2KfA8WNMclyUyHvfKabr1L13we9W5L1A/s2qJEqDriLea3fZeSdF2C8pIG7nsnkxYHXFQTtUoD3xf4OjvDKwc7Ge+7JXPBDQEEreh257nZhXs2aInJUDZKSV3p6vInIDa5/bjSrYEMi50Lls0kD/19Xh22ROA0C+8rcdOiusEcGmLzwwcGEPuM/Jx2neWj5rWqTGyMbIxsjGS/XzMxsh7AyPX6K4vuDy5UGkyDpXhb9Jx4jkal3O5ATkWkzEDOrXlmLPvVKKSTQIo3wrgsvu46Ze80+cZ0clk7OPj47OxPTkleajn5OQOkBorBZ1XcpLsPKbKqAOaE9t4JcirTRo/VT+Z6F03x8fHF5KWeEzJRjx4gvCFhvsDk2+qgPKfg14CG+rCFy6Um8f9PMdMCyC3ifsAK1auG68kroGG+tO3KLMn8EQzP3cdn5yc7Cx6eJ4JWCR7pd8XIZ9abLo+PZbU3p9ZSPy7HVJ8avGiv6ma7XqmTn2sGZA23Rk1RjZGNkY2Rrr8jZH3H0aOuwXUMcYiRScDudNTCH3e29s7uz3tQcQx9JkCJ6BxB/cg0XF996Qso7Cd2vq8zpP30bieXDj3ZeDq1SPXseva+aUe0jgEAc1HUJ45sldUJCtlkg0dgCiDB5+DhvPtfKjP/v7+GaBSJ/KxNK4HMQOPyV8yOD8ur7fTHLNkSF5mvuBJSHMQZPy8Jyyvlnk/Aqb6Men4go3yOaUYcd2xHbcl0U5rlOSd5R1Pui6Xx4jnKJ+X8iR/mYGCANYr4e77sznXcgxznuexMc4fSnceZnxybl80HB8f/+SyLO9aTVeixsjGyMbIxkhSY+S9i5Hb4kRMVtfyWnh3BAeQ5ED+3bc4pKSSBPZAk3OqXbrV7UGbDCSeXKbU3vlzQ/EcHZr6E6CmOdwB2ZaOIyD1oBHNkgB14ufTYmAWxOLLt5CwD9v7w9Gca1Ztdf1wzOPj46ifmewMNvKo/gxyghQrj8kHCMTkZ+YXDjhMCgICT3z0oaRvkQO4+KOe5X/UD5MRdeTA6LaTrMmH2Jbt6bOM37SNQDxSN6JkR18MOi/U98xP3Sb8m3zU45UxQB+gLS5L7mkBxdzHWOJnju/5bS3eddz1dhWgb7pIjZGNkeKlMbIxsjHy3sXINbrW18K7kLOEROUzyGbG9KtS/8w5HDy8DR3dHcL5VELzyqTz6cCkIGVSIbFKoD5eXfOkRBmSYV1eD7KU/Hx+t40HInUufnXMnZtJ3BOWj5fmT9WcGfBoPlYYfC4PCH99LJOg+D44OLiwh5zJNY05xth5ToA2pI3d1owXJUn3dQ/0tACbLdxUgSGflMv5ob0oi2SY7a1PiZlzpKSlMT2xejzys/TqvsZ+njxn+UnjJsBUW4JD+q0cAobHp/sJ84r8O1X1XGafJ7VZW4CxrfvUbIFInquuXl1tOqfGyMbIxsjzMRsjGyPvZYxco2t7aQaZSNUAMkrG01WwnFUJQuMl5/K+rhw6j9Ps1a+eKI6OjnYSgicFr254ok2GoEweTOKHAUu503heNT04ONjhKQUJ5xI/Xg0k8OqYPzTrenOeZUPKIB0QPHg7d20xwERCAJM9fR+xVwJ98SD98fzBwaYWofkoR5LRQUTtfF8ztwCJD3+w3RcqXs2b2d354ThMWr4NyRdC+px85ODgIFbFGPNqR5/zOPF5k8/N9Cy5ucUqJVOPewcfjkueuG888etv5vKxku00lr7zYXIHMre329ft4rSmwzXQS+08r1wGVE2ZGiMbI6k357kxsjGyMfLex8hr2VI4xvkrQhNTzgiP7e3tnd3mZtJSOxpjrcrngtJ4lwVLChx3Lg+MqotvnnHHpvMwYDgfE4Ta0+FnxuN4VedvXxLxIddU5anKe/T1V5Uwr5JqbPVfc9hZYnLdeCVhFqDiy4+Rj1mQO7FymKr21NHh4eGFfe+SyRdDfMZCbdRX86S33nB+Aqu++8LHdeoJlHJ5ghXfyR/4mQlvpj99djsopqvOfYlVXCY0r9B6ouPiTJ/l76xSp0ovZWRseUU+LaIoJ3ni3n3y5zHtMqqvx4b4ZF/Xs3Kst6HNSL7wEz/K09SF29d9mPw13Rk1RjZGkhojGyMbI+8/jLzrO1xikJUor0JVXawQUOGqcLgjzBxZ43ogeHB5hSF9JqVkNJt/Np504CBTddEJOD6v6lU5cj7ozCmJJ749aSS+GTjiXWAkB2ZV0sHHg01zqvLFW8EMbM3t/pLsmgJdCUUJaqYP8uyLhqoNODoPTMJHR0dnbZm8XH6+EtnncNskvekzZZ4thHxsH8fj0IGbMjl/7KN/vthIupxV5wjEHhvkTXz7+KycJ/2Rh+QDihfNzTdr0cdTbuLbmnSc/X3B
5QnabeUJmp+TnPrsFWvqzeM6gbGIvHmVMrVvuntqjGyMbIxsjGyMbIy8ljtcVReV5gwxoei7G16GPD09PQMo9vGHTRPJKGl+Kc0rZEwcs4RbtVvR8uRER/TAo3MwsTFRewVEbVMySgGpz3TAdGVOPakN20rnbh8HkQRaPOZVI205SZUP2piANksO4i0lOVWDOa5XSGkLPtybdDSTOS10/JifV0XMF01rVVQHBLcNgZuySn8JxJdl2dnaMVtguB1ncnMu+p6Px7ben3PSH5INHHBdVl88cNFKQGFljHpIydSBzpOv5xOdd3+fLQA8Z3E+5RXXo8eciHGvMfhMQgI25yHZa5Zzm65GjZGNkY2RjZGNkfcvRt71HS4agEqUAvzBwRRsFNzfnMPPKTlKoQlkXBF0PPLon51Pd87UX/w7uHgCch6dVwYGg4Z6YvtULZolh9nxqtqpPPkYCdB8XPHCKiR1w3lcP+rLBDnj2feeu+40jiekBKoaz8FG333BQL2khQLldL15LKhtsmGaiwst94c1nnme9nEAo/4SSCbd+XG3rf/uDmWj/pzv1C/pfqZXjUuZPHcQSLiAnS1yuVgkD0gY+XwAACAASURBVH7cK8m+qKI8rg/3A8aBA7/L5z6RclUCBvdv/eVibG3bTNPl1BjZGNkY2RhZ1Ripce9XjLy2O1wiN8YsiF1o9vPEu5Yw9VntVL2RQR3gPMHreBqbTujKTE7nPLr8Hjg+pvOTrug5LoPI+fGHPpMjUT/UC/XjPDOhpSoL9ZaqgA6+5CMFnZO/PUiB7A+Hup0pB/klyDFRkHe2oz1YiaO+WYGVzKxqObCmhZD7PT/PFkyuU573pOd2pU4S+KaE5vPLJx2QaBvqIOUKVpFV7eTrh9cWNuncWi6ZgabrzGUVj+QpAb23nxFBraoulZn/Unz62IrFtVzj4wtcBe6XxWXT1akxclfuxsjGyMbIxsj7ASPv+oKLDpKYJdMzhpICpBwHA533uXiOD41eNka6Gq/a/R0JzuH9PXm5k4koOx2Azu1O6gnOZVijBMJ+fvZdfKdbvTPwSQ7vCZzjK3g8wXg79w0HJp5P22lSYiTP8hevlPhrc9cCmm3SgsD9MwEDdei6cUoLnLRQcB/1MVwun3MGpGkxxzlnFVSO6bJRd6yEcWGoY2nh4X7C4/TDWbXe+fJYnx3z3DLLNyTqM8UHgSQBly863Mdn8ceK3Mxv/Pva3Zemq1FjZGNkY+R5m8bIxsj7FSOv9YePxQQFT47nx5xZKYKKTEkljZ8AgvNe9oNzLpMnCDoex0q8qI/3dUrHOU+qtKR2SoxMWJxfPCfAS/rwgEg8ps8JXMQTg3uWrNN4SiACOE9wCQyTLJRb/pV0xURcVZFX6dITnGyQFgL87tVZkip7DrYzH54lv2QP6jTp7rLYYJuUxAlKnuySzyXgTYnLx521uwwgvc8MoNiHx9PdBNlk5veaJwHWLCekN3WR95TfOD7fpMRzrC47MCUdpnhquj1qjGyMbIxsjBQ1Ru7Odb9g5F0/wzWbJCmKgnj/FLQcZ83QNJIblEFNnviP1QE/lnjWPPq9BfLvt4op90xmgsZaUva+rhO/0k5/Z1WOFAjJVjPnS444S/AJGKWjpLs1x14LRp+P55J/eiC6H/h8ae4ZaDFxE0g5PnlOIJ+Ax0EkgYbLlxJ9sn/iRbzPbLzGo3xqtv3JdU0e0/gzWTkf/T4lYn6mLi4DqpSPUnvyc5Wx6SeM85QbRNKbg7PrMuk+ycbPa/M2XZ0aIxsjGyN39dAYeZHHxsh7GyOv5Q6XJx3/7LeH/Zj38yB1R3ClrVVw0pYHdxpevbO//qXbqj6my3qZM/Kz792dOWXSlYOEV/pSAkk0S+JJjhnIeeAz8bgeyQsfJHXZLgONNd2sJR7nmYsG6jTxNgNzT2CetCmTg9wsYBMIMnk4z6Tka1y00D4eQ15hvey2vebwCpwqtgkwXZ9uE58n5YzL4sQT8ox3Hy+Bub57/lqrQvOY3+XQuQSY1PfMz9JYJG5v8bZr/XTOZV7TW9M6NUY2RvrxxsjGyMbI3WP3Akau0bVtKZSDJqcjg8lRluXibX4KxH8+b5orOYuDABXtDsS52cedYbbP2I9xHk8gqVrp5P14fPbZE7I7hPMxSw6zMavyfmXK4cDtY8t+rlcPtDXg1jH/4cAUcLOgkt/xt11SgvXFQgL0WWKkrKlvskfSp+j0dPfHIVkhTImQi6ZkK/Zz/r3q44sMjw/3aSbHFCf+3f3UfSTpx+Wmf5Bnn4v6uGwhl/SUwCnFm/Oo+ZI/6ZxvwdAx17t/15ieU33rCPW7Jq/GXNNN0zo1RjZGNkY2RjZG7urnXsTINbq218IroH1f9ywRqo8nHZGDSDKyOw95mQUmjZcqez5/Ihlojf80ZuJxWc5/DNABivp1npKuJBPlmx2/U2JVRzLosydKDxRPEuLd+fFFhbf3RQb7zcA4Je2q84eKZ4BGGf17WnAw8CkzkxvjhIGv766jpAdP4Pw8A/YUJ0lXHEv8ccGdYiYlWOp3bcGWZPQxHFyod9ljbduA+72DQIpTjjfTlcbxhO3ju/8keZ0PB+e13LA2HnlIMejHXK/U92WA0nSRGiMbIxsjGyMbIxsjr+Uthf62ossm5Tkpileqnpz9sxuef/1q3IPAjaw+6Va+A5L30Q/j+bzsPzOEB2EKEp5P+vTx0u3RtI2CAZACM1UlZ3yn4y7TLBkkW1OPs0Sj9i4b9SL5OPcMtCh3SjIEAhF1RP9hW/mfg6rL7/ZJYJK2RZAn9id/Dmak5C8uj9sjxV7ile2d79mCaPZXvLMC6gmcxL6+uHWdOCCmNj72DOzTQiJVSynXDGhTPkn5Y01ns4VBymue/5wP96umq1NjZGOkH2+MbIxk+8bIewcj12isJf2r0N7e3qJqRDJwSuQXmAjJk8L4rVL1ITF4PWGqvTsO501J1AORtzeraudtQJ4ckqxXBVm2cz5nxDZ6Tagncr8S55yc96rVglmwsS1v63tyJ98z3XPMZdl9Kw3HnIHBTFeiVG2egYnbn+epP87BX2tPPMwWE76okWz6vQ72TXKRPLk6iOpc8uPL9Ody65a+H08yp2Oz+EgAcJW2a1sFkj1FHndpQTDjjbko9Umf12RJixmfNx1fI8/LLjcXQl6JPjo6+sllWd51dYKmM2qMbIycyaAxGiMbI5PM6Vhj5EVZ3pgw8ujoqE5PT6NzXMtbCqtqZ2LelvMrRlJyLr8SnyUrfaYT+5hjjPgL2xzTk4EHmMbXG3n0b39/v46Pj3eSL8dwY3m1R/18Lo4/0xn5pS607cL3Wcs5XL6ZTqjTyxJUkpfnSWkrhwcJz4lkw3RLnL/w7ec4HrdEsGpZtftA8v7+/s5v1PAvx6BOk/6SXPxMvsQTdcTPApAkG8fS3Dzu/9wffJykZ/bztpJz9iOis/mdUnvnbU2GGflChvajz1wlPpLcCSC8jx/T31nV0e1Ked2XZ/Kv6SX5n/ObLgr4t+n2qTFyd7zGyMbIxsjGyPsJI6/lpRkkVgicGQZOStq
z4FMbHverZSZsfaaTOAiJ0i1E8ea/80AeCA4CFY3nyVBtmRT0XcTKGnWVkquOJwNT79SpEpxXqyg79Zn0w0TgydP153ymOZKNfX7x4E7u5AnPdTMLBj4bIGLlU4sk+kNKdr4tgklA8/giy5OJiL+i7jpzffG8PrtPuw0lGyttzqPGp045hsaUbujjtAF14NUstx/lYgymCmuqjCXbcxxvO8sd8gHnx7+TL1ZVE7BwbOYKt4nPl+I+2Sb5ygxQBITJb1xOH3MNTJquRo2RjZGNkY2RjZH3Jkau0bU8w+UOJ0pJ3Nu50q7S57LxaOiqi7dKU9CncfnjfHJgVgNnVTvKpeOU8+DgYOeXsmd9HJhn5OCdEtXMOb39siw7FQ8fn07qyVbH3CE1psZL8mqs1K9qnqQUBGznPJPvVOV1HRwdHZ0lWwYe++mz29B1rjFUUfWqiPsNeaJNJRdfj5zkIF/OJ/Xi86UElmTlQnCMze/snJ6enm3R8WqPj+8g69tdSA54h4eHdXx8fGGbEvmbAe7h4eGZD83I5U5boWYLgLRIS75GfSYQuyzWZ3Knv5SVMZoA2AGfYzNu1/TXlKkxcnfexsjGSJ5vjGyMvJcwco2u5Q6XJ+W1Sf02MonOqiBMAOXj0CHHOL9Sd8emMmX0VA0ieFAuObEnTAcZ6iElG/KuMd053JF5O/oyo7oczpd0Q12q+kAA9dfIcgz10zyuVwaHbOLEapEDl+ssAZfPqe9OvngQX/Q1T7ZMPAnw3XccFOQT5DktotjXY0NtyIvLKt0SrF339GcHYcq3lmh9geBJVX7DsbhA8IWXxiGfPr/rjVVy8pESvl8kcBzJrkVdks9l98UO2/rixuXiefpv8qk1XjQOY4bnfcF22ULU7e7+wzZXzTlNmRojGyNdr42RjZGNkfcXRl7bBRcnTEqZJXb/7A7px/V5bS4GoR9zw3uFJilShmPlw9sdHx/vyJr4oTOqKpEclHwISGZJOgFkAlwec2fUA93UV6rIeXVN51z/XvHyAKLuOZbP4XOlPdDs58mFuq6qs4qpV8Coc/qWwM6TE3+7wRcBnkw0v+uEfkW+2c8/p+8c3+3gSdDjidsy0twcYyanP/xLAODzG+5/HF/H+T1VGXWcfuIgQt3y/PHx8QXdcEuK7MxFAqu6XCSk8RN4EmioW8lAnlIc8zv5TjbiAigBmgOpPvtCyeX3OdYWHE1zaoxsjGyMbIxsjLz3MXKNru0thS6cBBFTFJTt0lW2K0JCzSptpFTxYyUwGYrzegJkW47DIPOtDwogzS0DpeTJtp7APXmnxU4ai0FN/ZBHb5fmYRKs2k3mKZhmgO8Jmbe/aVcHPi4KVE3UdyXDlBRcnuTjs4Ujk1+yp/tFGssXLLS/J6bEF+fjOJp7tkhaW6BpvlnCTYnFq3scl3HAePWELMBSX/qj2oivg4ODnUUZea+6CDBJRh73ve2esJPufEGWdJsq+NQXfSXpnDrzOPRFYQJV+kOa3wGE+ddtR5l9sZwWRlu99lsKb4MaIxsjGyMbI6m7xsh7FyO3sRaD564vuMYYy+Hh4YWrXE/UfnXOednHA9jHmyV/H6vqIogkB0mOqe++n9UDhQZZ40/y+5xJ5gRUDsKp2qi/3PtOWbktYy2RUpYUvA7uOs/goGyuj1nS8+ByfvSdQZaAhJ9nAepJnrzyoc4ZyM18wduor7YzpHEcUDxevILk+vJ48SSjOS6rnvPVvGsx4bbiYiQ9GzDzd/KW8oTPmwCR4yVAE60tclLCTCCrua+ib5ePY6R4ZjvO68DLuX0eyirZ1kDXY9qByRdQor29vX4t/G1SY2RjZGNkY2RVY6TriX3uFYzcXkQ+vBdcTJTJ6WeM87i3rdoohbcVZ8lpTXFUDuee3X6v2nXaWeLQVbG34WtYE3/qr4coXZ5ZciT5rXzXn8ZVNSQlcLVzkEl2cD9xe5BmycvnZdskt6of1GeaJwWVJ2S1T74iEOE+bwILE8Ks2pmSEuX280nfKXlxfk9eOpYWKklOB9eZDZks3Qa+aEyJem3xwLHd3uTXk/Bawna9pTndLg441Bl5dJ6T/bw6dhVyPXryp2/zYfBUuXVdOVG3fJjaZfcFjI+hc8fHx33BdRvUGNkY2RjZGOlyNUau0yMVI7e+Fie662e43GmSI9FZZg5NB+JYcmRPUO4wrpzZrb+1RKDv+/v7Z7duyVd66NKTvDupJ1A3LNvSsVwP+i49+FuSnKjrNf5cHvJNfrzKpeBKCcyrK86X/l6WiKRz8enJU/0S0PC8v8rVk44n5zQm2yUwUl/tyfagTX08CVK3nFdvb2Ilj9W2FFOpspTiZeYjBFSPO+rYeZ8labVn4p3J659dNgKdVxBJs3H5vIfOeaJ2+ZI+mWCd76Qbt4+PT79OMZQWaVpQ6pjP4znEKdnAgTPps+nq1BjZGNkY2RjZGNkYeW0vzSBjfswZ8UScmKRACQxc6Q5ETBBrvPAzDTgDr1S5cF74nQAgHmaJccYjeRBvbEti9UlATF7XKAWB+ioJ8HdSWGFgok2A7fKxeuEBOnvI1EErjZvs4wHjScp/xJG8pPY+lmzpvyMiIODzC25/D2T6kCeA2cLB+6gt+3t1iecTGItP+lDa9uE6Ej/uo7RhAhy2TWBMXdPfEsAn/xBfjCH3D++fZKHfz5Ir+U68uL1PT0/PfF6vEKZs6pP80ytuCdTYnjkugU/yhbVzTVejxsjGyMbIxkjy0xh5b2LkzAerqvIL82+TZiCiyf0H6lxJsyShSggF9sCkQt2xLuN5Nr8cbe2fg4B4S8f11x2MDjsLbE82JB5X4B8dHZ2NyR+QVPKaAUqal2NrTK+KJjD0JOjyiWTLNd2wD48rMFgFoQxuQ+qAFQ/X+9HR0Vl7VXEJGDN7UBaSb1eh7zOhkVKyZnvplDL5Z/kDwZhvjtI8M39j4jk+Pj5LdkmnapNiaSa7yO0tvSd9uG6SX8ySHWMzLV5SPy5sNHaKK18AesVTfTmPeE45g+RAxtyT8ogvKFxvWhyoDRdSvthI/ZvujBojGyM5TmPkOTVGNkaq772Okddyh0uOR8E9Uc4SZLri1N9U3dN5D2yvvM0ClFfdbMOkRsDzJOyVF/LlQZEqGNTXwcFBPfTQQzu/z8CxfO6UeHic7VKFYVY18qTCSoDG9C0HHtSuNxETHXXFz3RsD5QEQmpLn0t2pIyz19yyv/ucL26cdyZcb5uAxxOt+usvdU19emWG47idaEMCC2VyWZikUszxmLdx23Fs+q/bdpYjfKHmFVG3jdvNda3P3HI1i3/3saSfpPcxdn/XyPn0udbIX4PrFW5f/FBvXOAl/dDn9GxMysvyHS4irwImTXNqjGyMbIxsjGyMvL8x8lrucDkz+szjrgAPgJQwSFKIO57O+XfywL9ugORAPo635/g+h8sxk0v7YTkfr5jpID63O5AnMe1nTuPMdOvnmTCY9LyP8+R9nFefd/bZbU15xJ+Szuw3P9Q/VVI4p9tO/bQ9hO1Y/VPgul6qNknBbZn8gv7BSjf/Uc6kr7WK8SxGkh0o+y
yW04IiHfc48wowE/SyLDvVQdfPwcHBBbBw3hU7s4qo5ubYPgZlTv7v4My5U1zSn9ifff2zyAHzsvghz2qfPnMBlfIN23G8tChsun1qjGyM9D6NkY2RjZH3Dkau0V1fcNEhHDBcEBmFxz0Rsa0ndc7nBmQCUB83CIN/FtguR0qEKTns7e3t3HpMVRC29d/JcB1IHncY6tvH5oOOyfiSf3Zr2HWt7+l1rdQTg5j9UiJLTu5j+puxEph7JYXjUPczOzC5aXzZhAGmtpKPv4FxOwuRGYBxTh3j7faUoNmPbdnOkzj1m3zZ+fctCzo/k5HVK/LHeZO+vPqZkpq2FMwWZrPj1DM/e5VxllCdDwKDxmE+4eKFdqNtE+iKfPHkPkNd6vwMYBKAJHl8AcWxOecsbzRdTo2RG2qMbIxsjGyMvJ8x8tILrjHGu48xnnJJm81gKxWXyxJYUgydkQl05pj+eeZgPO4Gc2NQ2c7vzLHJfwKTlLAIiJf1pxNUXbyy9irWmnycKwHsZWO6g/rYfJ5AFTf6gn933XBxIFk5v1dEnIfZQiAlOspDnn2clBw8IbPv/v7+heTsSSf5mR9nQmXS9qTsScLlXvNbT24kBx7nlbfpPTm5bRg/TNJpsUkeCYhpbPfbFEMz3jS/L1YJdP77PdKB79/nZ/Z3G0lejeMLwWQz9z2X2T9LHvqq+5/GIx8zoGo6p6vg47ZdVTVG+tiNkY2RVY2RyTaNkfceRl76DNeyLD92WRu03fmcHNdBJQWiPtMICUg4Hx+8pLLJj7/q8jJek1wcS/PRWDSGy8mxXFZv5xUG58OdggGwluD53ZOUOzT1PrtV7ElY5GCb2s+SGudw21BGBmqq0iVwcOBy/hLN/I56mumCIOjA6HPws8Z2XYh4bs3XZvbhOLOYSN/dN8iLHxNo0HccgHyuZDMd95hIdve/szhLcqnNmn9yPvJwenr+FiUS+Ve/xFvihcept5k8CazofzNwozxpvLU573e6HXzctt/53Bi5KyfHaoxsjPQ5+LkxsjGSxx8JGPmwPMPFREWBnDzBsF0CELZJSTIZmv2593e2bYJj6rPfFuUc7JvAz+XxOVIFhOeS4SWrzns1LCWXpN/kzEzadN4UBGuB4Trl9gRPFl4l8qrWTN+s/Lls/Ot6c3+aAWgKagcWyqXvtI/rlvL7NqOZb5P35AueWP3cLH4cuL0vbbHGm9vEq7Ts4/rSeY93EcGIuqQ9+M/9NvkX+UhjsY94S+PqMxeRSd/pzkPKBzO/9Sqb93GbzPLBTE8c0+dNVeGmO6fGyMZIp8bIXd02RjZGPpIxco2u5S2FYlQM8NacztE5maBFnhR97JmwyQApsTFhpLkSv57I9Z0ONqMEjJxLfZlQ/Ao7yU++3KFnzql+DO6ZjtbkSAknJWz/uwbay3L+dpyktxmvHvCup1QlSmOlBY3zLkq38pOe9ZeJkH18zqTbJL/s7K/XdWBLx5jY5H/+VirKmxYb5DUtClzXLntK6smv0/wpCft4nMMTbPIR12+KrWQTArf7h1dcva/rxs9Rf+5Liadko1nbWT9W8D1PpJzQdGfUGLlLjZGNkY2RjZHe13Xj56i/RxpGXssdrhQgUjoTuBJwAgD/7NWP1IbjMLmvBfjMWRLAsS8/J74TuKT25M+NRSf1vmmeNX3OkuOMN4LpbCGgfkm3ntxSckh2STrwdq4/VoAY1Jw/BTSrKORLY+j1qs6bgn9Nn2v6mJHr2f85764L8bXms0k3HCf5iSdc10caexYTDg6XjeF8ypZ8DbDnF+pitmCgnhwQZsnfdaHvXqn1ahnbXZbUHRx9Lo3pNp75i+dbl8s/S5bUh1tAEnA3XZ0aIxsjGyPn+phRY+TFMRojH7kYeW0omhKO/vHHzDyx+2fRWnJec6hEM0f29hxT51KFjoanYzIRpVvX6sO2KaHQWT3wkp5cJzye2s3Go8wKFAcvPhDJ40lm119KPN4m8Z4SXkoAngxTQkzBN5vbwT0l+TQ37eigxPHTbe1ZTNDfdC69GSslJRH3T3u1V2OmpKLE4jJ4Wx2jvvxzkt115/7AOGCVyftyDPfHpPc121N29yfqyuV2mTg2eeUWrKSHtfHSIsWribM8cJnv8hj9d23upqtRY2RjZGNkY2Rj5K5MHPtewMg1uusLLk2ajM3AWktm7JNusXpQsa8L60pPylAy9wRBJ2RC4K9Oc440vhsk/fNfzua5lIw4J+cjOHmSS5Wtte0SDhocP+kv8V11EQB1zl/Nmvonn6FNFIDiK/0qudvHbx17wnR/ok45rm9RcBs58Ih86xB1Sr/yBYZIeqP9HZDdZ10PGlsPsftihX5M0HDg9rE1b6qa0R+og+TrrkPJItut6XiW2BPYyIcSuKVFQsotLqfriG19ET2rqjm/tAPP8zv16XHF6rbPzT4O4q4/+sEslzZdTo2RjZGNkY2RjZH3B0au0bXc4RIjclY6gQtBBp05d2SC1MxodF4PYneYdG7mHBqTtwx1Tj/25/0JEskR3GiJL5eFBve/Xhlz+Rw8PMnybwJB34rhOnbedCzZzZOW9/U+s+BiIpa/8W1YPn7yM87t56QT6sdB1f1H/eQT/qavo6OjC/Zz4Heg0vnj4+Od7+zvQOuLHo1Nnt3mnJNbEhxQPJZPT0/r1q1bceGgPvv7+3V4eLjDD5O524E+4GCuY5yffRMoc5HK8/wrvn0RS53RPvTFmzdvXsgdrj+Xze3v8eGvR55t5XA/SDFMoPZ+Dvbej6+q9opp0+1TY2RjJI81RjZGSg+NkfcPRt71SzNmQvBzEsaTUkr+vG3vCYhOoYD3/ing6CA06Fqid8ejAycHcUfSfMkxvK0fT3pLwKIENnM6yeHfKUOaz+Uiv5IrVVSTgzq4iQc6tCdZ/161G2CSiTzwvNvKdUl5We3gAmlmZ6/yyg4eePQ/90fN7UDPpMJqjAMr+fLxaEP3Rbe9gITklfhUWffku7e3d7bP3yuXx8fHO5Vmjp1s5lViP05wlL5OT093Xn/t8rCvJ3TJ5LkoxdXBwUEdHR3t2Jjt/XhaKLA9Fx6UW7qk7Er0XuX0eeg/iafE4yyXp5hpuho1RjZGNkY2Rqp9Y+S9jZFrdNcXXAwcVu3EwNrVpwOPB4AHWNVuQFXt/lo3nWMtqc6Up+9ySoKOjOpjM7BSNc0rac6L+Jk59wy4qnarROkK25NUSggJpDwROs2cl32SD5BntfMKCX2JCc+Tjcaqqp1KquwwA33Xz8nJSR0cHFxYCHgCdfJEx2PHx8dnx09OTnaqi5rT/T8lOepFOmACOTg4qP39/Z0fFHSefBzqheBD4lwpBhOY8zhlOT4+3rE5ZXBZnVd9Z1slU88rAhCXg/z4YsETPOdS+1kSPT4+3skJItepZE9VYI+3FEt8O5nrOOU06tYXCe7baSzqyP3NF4lNV6PGyMbIpN/GyMbIxsh7DyNn+qi6ptfCJydOzuLOx75Vu4qjMdxx2IeO4r9wrb7u5ExYa7zqOx+kJLCwXZItJWId55wpmJ1SkvH2BD8mjDF2Hx71fuQ7J
VEHO8pGORwMfXzqSue9oid+Dw4O6vT0/M07HrRJnyL19aRK2dKCQG0EMAxMyeH6JoB5O/HiCXG2eHDAVCJ2/WjBw8UP3x6VfMtt7wtngrAqkO5j7iP6q0rW4eHhhfjl4s+TuNt8zaZ+LC0YXUfig3r1pJgWCzNf5jGPh9SGtqaeZ/K4HzIWuABJCwWvlnOx4Hz6oj/p0n088dh0dWqMbIxsjGyMbIy89zFyja5lS6Enh+QEIt7q1TkKQ6dm8PMqVIpg1cb7eMIWf8uyXEgW7uS6alUgcK6k5BRoTpSLx6jHGc0CQImPekxjU8d0pAR81MdMljSP28d50lhe+UsLhBS0M9CVf3jCZhLjcS0w3GfpC7IVKxdecXKe5CMaX3rxCkkCcPJDPREUyZP+qWJFPc4WTkwulEE8u27Fn1e+UuIXH9Q1+6ZE53Zn/Ign6legSh36oscXJjMgJS/8y3FmCyoCtvSWKqPkdea/VbsLVc5DfaSqKMdnW5HzlRbA9NMEZmtg2XR1aoxsjKxqjGyMbIy8HzByja5lS6E7WEoiqU/VbjLyvkw+7hypGuDJVpU7BZ+f41zJET34RQSGWUIUJbCSUfkAJpMNKzMEWZe9Klc8Ka9kY7J3QE8JguTJl0lXOlPw387Y7vypD4PYq1tMiBwvBZsDmMbyh3c5fgpiBy7Oy6qXjjFZJxDzWKH/sVInvVTVWTVnTUaOTR/yRZP057pT7Lifu8+77XzRxIVYSs4pEaak5/ZL7ZMdZxVIr3Sm+E7ypVznldDZOOwvu/oihXN5zFG3vmifLdCkf9fN7M6Fj8G2nKfp6tQY2RjZGNkY2Rh5ke5FjFyja7nDxb/87AaqOld+qi6wvx+XIql0oIjNyQAAIABJREFUJzqIB0DV7p5nv73sCc8TtMvgTkqnIGB5AHM+Jh2Ny+BJVQFP2EkPVRUrO+KJY/O4kiH33aZEPEtKLoe3pZ5mAaNjbju9iYigxblImtffXkR7Or9e3XCbM7H6P/EuHVLHlE/VuOQLDHzqXvPrYdoxxs7+d38I1vWfFmPuNx6LWujwTUBu57RI8KTMBZPrfsaL+3NKnnt7ezv78rmVgP08mafFCG3Eth7PfBYi5YvkY7PPCQi5uEm5h7pMgMr2tIM+M49KDp33cXysNFfT1akxsjGyMbIxUv0aI+9tjFy7+LqWZ7gSE7xqrzoP8JQw1I+K1XcfZ+aEnpQ8wTPgFYTsx8+uMB+Hyk1X354YnT/ONwswr2B5X+rSKxF0Rtcvv3v1yMfjeY7JhKfjqdKV5KRtHKSSPVnt4NiemD34Z0CusQlSDtwkJjOXg7zrn8YTEVyoQ98OoLl8QVJ1vhXFq5QOVJo/+Z+3d/LqsAOJ+if/T5Uu54Gyuq8k29PmvqhyX+JxLmLIDyn1d13pe3r9rPPsOYfnKavzSf91HTl/qthyIU79p7sPyb/Iiy80yD/Hc5003R01RjZGrsnZGNkY2Rj5yMXINbq2Cy46lQOFM1y1+2YSEpMDBXZFcB6Cj88tYh9Pojyuvgx2Vv0uU3LiLR13WfTdjeoAzCqb9OUO5kmDvLgdvH3i0/lL8nrF0vXsCYhBmhxfY3HuND/lTIuH1I9A5YuB5D+Shwsb9wWd51z6K+Bj9c154RjJPj62V7W8v4N6shvfNKY2DkIpjrnQSePO/Puqi6Pkg55k+fpcB1/q2nUxS7Scl/MwZrSIcn/SeJoz5b00n9q434r4Gmsuksgrc9/MDiTGlMb3rVQzXdwOsDRdpMbIi/M0RjZGVjVGJlm9b2PkIx8jr+UZLp/MkwiPiXjrT+ROSkdhwDh54NBAnMOdk4HhCWVt/OTEHvwOAD4er7h9m4LrwStLDigOJOzPwEoyqM3MlnRwBy3qjPL7w8G89S7n9yRIchBhhVB8c/uAv6WL4yZA8gSabKOxqCf/HY6U/FMwqqKXqiUkxoz7pffVvARlr3KLB/4mheuae8ddb57cU4WIOk/6pM+4Xznw0lf5cLVs7briZ9enf14DPbedz8WY0TaNGRCleE88UA/p910Sn75gczk9j+l8AjfPGYk3fp6BVtPl1BjZGNkY2RjZGHl/YOQaXTt6KmATEx7Y/MzEyKtWvnklEUGH4zuw+LweKGqXzs9ALJHasnowM3ACWyUIjsEELP14UvKH/jxRryVs8k49OWCxOpEcTu1cLvHjgEPe00OLSQ6nlDTII+3hfLjPUR6CFuehL7u/uc95HCSZ5ONpQcM+WkD4AosAQv/14HefdN1xm4XrXPOxikn5PcbcPrPYpV/Tn8k7fV1zCWzYn/YcY5xtH+Hiwuembjz+yTtl8C1gtJ8DrI+TFnVuGwellCN8DOczxb4fS4usGc1ySNOdUWNkY2RjZGNk0otTY+RF2zySMfJan+FKimRgJIUTOLxi4InRK1Gcl87ggc3+bJ8SF/+mc5QryTtrOzOIO6+3dxk5hssj4sOSnhBYoZjJr8+0mT8wS517NSxVTmkD3+7hcyfbiLzS4cl1pjPnI/EonTooJF7cl1M1RHP7w6SeVFJVzKvdrBrRHuQ7VTEdKD3x+rYVJh3/zH7+mQs69w/qKIEL/VJyut9yPuqQn+nzrLCRH7VX28PDwzMgTQnVfWgNMK7y2ft4FT0Buff3aqjzSDmUX/VbNxyLb4CjfSTbzNZNd06NkfO2jZGNkY2RjZH3MkZeywUXHdO3njBY2Dbd/vWEr79MWjrmSvcgdGMwOOmcbkApODkWg1XjXAYoHpQ8R/nUjv88QaX+rld3Vh5328wogYbLlWRkHw/4xA8TywxU3bmZMBwgZwCVAsEXHImHBGqpbQIkHWcCJfDOtqisAXzV7kJBWwrcnqzg+VYVxlgi8UtQcF7c1rN9224/f3tRSorehp+5mPR5HHxTjFD/6uMLIfeVxIfnt/R3Rq5X2cIXFOlOxFUWgEkOX3zyWAIjX0CwfdOdUWNkY2RjZGNkY+T9jZHXfocrKTcJl5KflOiGIqXkNBM0JSXOn273emXFDe88MZklXtWGPPt3BprGSwmfAUP+vQLg1Q7nidWLBBaeZC9LrP55BkDpdrF4cb1IFl9oOND5a0AZ6L5AEZ8p8B30mfCZwJIuHPxnicb1s5Z400Ip+bD49PNsJ7+hr1I28UT9zpKWx5T7alqAeKxfFdxpf771awZyrt80h+sj+d0auDggeZ5byx+ktPj0vrM4Sjy6b6Y7FYkXX6Snha3aNd09NUZe5FVtGiMbI10/jZG74zdGPrIx8touuNwRGNhkTLd1KbD6e+VN48yAJzk9gYIB7v2dV+dbPK05JZNHAlLniTrguP49JZFZ4iE/5JdB47KnYF9z+FkSE+/SgetIx91hRUzSs+cQvMLjweL8XyUpu4wEqFniW0v8Sac8R534gonycYxUqXFgnAW4y+8Vaso7Gyv5iycWyTFbLHmF3nVJnhwY0/wOYB4fM549Qc+qYAng+TktONyfEy9rwCo98hjnSAtLzuFx7jlP416WJ2ffU15oujNqjGyMdB3peGNk7bRpjGyMvBcx8touuOgUvCWYFJkcwJOjt+d3
H4/9FCzce5kczQEhzT/jwR3BX03pQZr6zoDTkyIDODkE2yRnTLpiUJHPJLsDFNu4/WZJluM7L/SVtCDx+ZJPpQBd44tyscKX+ngC0S12Bzn/MceZv1Sd7wl3m+jvWn8HgttJ1Elv6U1oM59fAzOXcXY++WhKWMkX6EupajjzF+dlpj8HxWR/lzXpLsWXz8uxZ0C+BpDOx1rMzwBy7djs/O2AS9MuNUY2Rl4JI8eo4w/9tF2elqrTvW3fRYe2MtSoZTmtCrY+s+HYq9Pl9Kz9Ge+1HW9s+dueWyrbN+HZZoht3xFyd23mORt/nB/TfKfL6c6xpZY63p7TH/HJMWtZtofP+54uF18QQT7LU9io6EPL6WmNsbejizU6+Jkfrr2f/jeNkSZrY+Q5XfsdLk2u777vkkDii1ZRupWrMZzSMQcGOcravtxk5OSclMX5TcnewcMX1l7d9HHd2ejQY5wvmF129RO4p/ETgHrwpaDy8ZclPxfgCcCP8/Z82jubeE4BPQtMTxqzIOV5Xnh59YltXV8k+rx4mv0go4+R9Mx42dvbO3srlI8xq+ZxoecxWFVnY7KiSH6pG09atCNlSQujpEdv59thJJf7oc9FPXA+X9i6HOTF7XFZkk3gkHSQFnKe//wOwGwBl/hNPk7ZlSf0UPBVwJsyeS5oujNqjGyMvBJGVtXJMz51aoc7oZPLmzTdJY3/43Nr/NQPNUaC7keMXKNr25ifFpLucL6P1heBVRdfmcpjM2HoXHJs3n5Pzswkv+YInnCTwdPtztTHF7nUkwcU+3NfvMbgMXci7+vnrjJHugCaLbhnIOtzJjunvi6b2yHJQ1lSAMxk8jYJAAmYLhtfV+s+u5ZIki95hZYxw/6MG1LSJWVRXKRX50pWJRzfwuMLBo+BmWyJ57To4D/5xmwLjtvBdUm+mE+cl7XYph65PYSUchR1zUXmzDYJsDyfiAfqxfOs+8MMMMiL64Pz0yck04zfpqtRY2Rj5O1gZNMjixojGyMvi927vsPFRVtKtElhUgiZ5t0OtmOicwVyPl6l82FXvyqeKYUVQ0/4DhIzWfl5lnBdJy5Xkik5yRj518zp0NIFj7kdfFy1IwCPcfHNRz6Oyyr+OI/sqLcCJZtSbspJW3oQ0/Zqm/a7MyHSlmkRkHTkxDFm+uRbkiiX80253bbUAXXtx9Jvz7AvF1RMsvrRSS76aB/vQxn9GGOGcye7uu64vZHjpP3XabHHcZPd1N77O18+NvWW5nPdSg8zgEvxwq2qqS9lSDFDPaovXx+cqoMpdyaw8zzSdHvUGNkYeVsYebpUHR9Vffqfq3r046s+9ZvqAv3wd1R9z9dUfcBHV73n0y+ef0PQQ79V9Zl/sepNn1L1iV/7O8PDGj30+qrP/KDN5zf/vVV/68uq/tOPV33D83bbfdCzq97lT83H+ZT3r1qWqhsPVH3WP636r6+o+ry/sjn39u9W9Zc/46xpY2Rj5BpO3vUFV1okixFfkM62pohRH4N91xbFnMcX5TQ0xxHPPkaSy/vTAQmCBCIHHq+WufwJ9FyfdAoF38HBwc55OqfOpe0cM0Bj9TP9gJ22myWnmlUUq85/SV7POqXFgweixuDcp6enF5IOgSsFIn2ASY6y++9z8KIjBRn1PQN86kXHNebMDrNkxQTIX5hfluXMzkyE1BcXbrz4Ojg42LnYYnXI+fAFlPOfkq/sIXv7ooQJbZaUXd+u07TATG0ku2Tzi8okq/OlMZkLHCx9bo7p5DHAcf13Vmb8+XjyhVk//mUckOgvau/2aro6NUY2RlKfl2LkwX4dLadVL/qeqic85cIYVbVZ6D/uSVVv+Tb5/BuCTo43PL7FW//O8TCj46OqZ/+pqpf+XNUnfHXV8z+i6vS06j2etuH5vf9C1aMeU/V9X1f1J//ifJy/8yerfurfVH3RD1Z9/PtU/d2nby7cXvqzVR/8CVUv+t6qn/iXVb/4H6sq5+bGyMZI0bU8w8Uqkyu/ave3Rxx8EoOesHWMgnHuqvPnThKocQ62T9vOtCgmL+48Xnl0OWbgJUpX0un2anJyHiOw+I/h8TMX+QQrt5PGoX1E/vsx1JUHdrLzwcHBTgWAwHSZs/J2tV9geRLTXR5/Zor2k5zJBu4LM//zQKc+9FcXRloscE4uDmQvzkP9Ofmig3OuJXXZzuNKF17uC1wAaWwuMMQ3K22MCSYkze8JnHP6hbafd7t4bHobt2eqLCoPUK7ZwjKR+77nFM6dFlqzSl3yO/efdLdDY56cnFyQifp1sJyBu8eF3wFouho1RjZGzmx0ASMLMv3Gq6qe+U5VT327qs/89qr/5wer/re/fX7+GX+36ud/qurHvm9XgV/6b6s+8X03d3lGVS1V9eCbVH35j27Ov+w/be5OVVX9sfev+t1vX/Utn787xqd8Y9XbvvPm89/641Wvfc1mrK/56c3F1l//o5tzr/zlDY+//12qnvP1Vf/uu6q+ZvsM2jOeU/ULL6760e/ZHftLfqjqk59W9du/VTVG1Vf+RNWz/nDVY9+06ot/qOqX/3PVc//Cefs//SFVH/KcujItS9XPvmhzUfp277q5G/cL/6HqWc+v+tqfrnrsk6q++6suH+dnfmTz9x3+6OaC7ed+bPP913+l6tu+oOo1r6x68u+q+vBPr/ED33jhIryqMfJ+w8g1XVwLelJwCisGtKgmQIhJOgV/XV0K8M+z81W7iwG/xar5uI3AE6y2frlSVX3gHA6emtNBcub4M1B0PfKY91c/BorGPTw83BmHdzZ8fHdugrknD7WhLpON1F/tOb/4VkUv2ZcB7/7k+uQx+piDLeehTalPgfDh4eHOWORBixZPfGOc/1aMEhXJFyX+2f1X53VRpRdbHB4e7iTCqs2zVynQ6eunp6d1dHRUVXUhqe/v79fBwcHZXLQxfcP9n4sMyux5IFXmNSbtzEVIqgaTh2RPteE5T5bus4xZ6tztRn79u8ej5ybOxb5r7Umek7jgJY+yN+Xw3OA8uU45ZmrfdPvUGNkY6Ta6DCOrarOl8OO+tOpHvrPqeR9c9Qfevepz/s+qP/YBVS958eaC7IM/fnOh8pIXV33M51e99lVVf+UPVj3n6zZ3Yl7yM1Wf9e2bi4WPfreqV7y06tnvs9ki9+f/ZtULv3RzMfLO770Z42nPrHrcEzcXRL/0/1Z93HtVvfhHqp77gs0do2f8vqr9g6pPf8GGxye8+Yanj/rcqp/8V1Wf98zNRc4f+hNVX/o3q97ibaoe/6TN2Pr3V9+56pP+cdXLfm7zfVk2f1/6s1W/9rLNnaWXvLjq8U/ejP30j9rM9Q2fVfW/vnXVv//BCzrfoYPDqn/8M1W/8erNxaLoTR5b9TbvVPUT/6LqhV9S9eHPrXqvP78+ltOT3qrqG36u6qOev5Hzmc+retJbXci3jZG73+8XjFyjaytXMmFVnVfefUHNz0xuPO6A4U7J4yJPXE50Uu/v89Px04WC+qTkTl2IH78w8KC5zLlJHjBVdXYB4ORgSYDwcwpo8Uu+vT3BNS0evCLhQcRAkDy6ANPYnky
kM1UlFDTUhQM45VKA8cLLE5l49Isp58ft7UmP7VndYozogkcyu8/TT5w32YmLH+mPcvGv+5jG5HNf9Dmfm7J7xdovzOQDbOe6Skk9Hee47keuUz/vsSX7eH/PR/Qhty0pxSLnmeUN12fSAY+57KndzB5smwoA1G0al3MnfTZdnRojGyOvhJEUaW9/cwfl+FbVq3616oE32Wzhe+wTz9s8/s2qbj64+fykt6raO9hctLzZUzd3j6o2zzAtS9UrXra5O/WqX6m6cXOzZfH1v7k59ujHb8d78ma8V7+86uhW1a/9UtVyWvWU37M5/6u/uBn3zbffDw42PD3xLare6b2qPuIzq/71t1R9z9duLnhuPlh181FVf/+7qp769lVf8X9X/fbrqp7y1Av2qKqqJ75V1ad+8+bzi3+46mPfverbvnDz/YOeXfWPfqzqf/hjua9ojKrf+weqXvjyqr/3zy+ef91vVP3mazYXqg8+en0sp4PDqlu/XfUFz9pciH7kO1Z99afsxGpj5Pn4jZHndO2/w8WFnC8OSek3QJJyfAGoz2xHRbmB3Jlp/DVHYeDwfLp16IbyIPIFte/vJj8z53TyRXQKMtmC+9RpB8qhisDR0dEOSMp2HnTen7xwC53O37hxY6eK686u89QrfSPZl+fdN5hkZreZvdJK25EP8pBs73fP9Ka/qt07SfzHZxLctyiDkk9VnVVkdWfQ/c+rWqzsyvZqc3x8vPMsWEo0HJdbDj1+bty4sZPoOIZXvVNVjP7A78wfmi9tBU18yZ+pD7e1b//wqh/jlluOkv+lZ/PUhnq4bOGcdOPysQ+rp6kffSzN6T7nOlB7r1I23T41RjZGUr4ZRp4+EkPsF15c9Te2F0EnR1Xv95FVj3ps1Qv+wXmbxzxhc2fssU+sGiv229/fXLh9z3+r+rHv39zZ+9bP31wYPePvbp69uoyOb1V9wJM2d+q+0O6G/eC3Vn3lJ2+2KH7Ax9y+rK94WdXnfnjVc7+16tu/sOpPfFDVz/9ULW/y6MbIxshY2Dkba3rmNkjKpJLpRFz8+ULXr+ZdaCqHbZmoWbFzUHAw0Tg6547pIEDlLcvFZ1jcOXy7gf7yH43DB/icV5+DPJM3r6akq/xULeB8rADSOTmXFt8JVNyGaQyNrd+REtF+HpAuD53ak0NVnW2L84B3XfLCx4PLFxwir15IJw60XlmV39AfPDDVhomMbalH/eVF1snJSfx9LsmopHN0dBSrt4kH15Xzpba3bt2K+9Y9VkmS33VA/9Fcrhf6Em0280vazWM/LfycL47l/2Z64hz+cC/9i7x6vnNbekwty/kdUrZPVTa/GOdf8jrjI1Vim65OjZGNkVfFyGVlwVb/7jur/syNqq/4xM33L/vbVf/6W8/Pf9Qf2tzd+qe/srkbthm86mmP2VywfNsvbbb4/e8/vnk+6TP+l6o/86FVH43ntz73L1f9+PdXffG/qXrrd6j6pv+8uUj6gDfd3An7/tdv2j34mM14L39J1bPeper3vdPmGa5bv131nV+5uUi6jL7vdVVLVT1te5fpNa/cyPe+D1Z94JOqPucZm+NPf1bVX/qkqi9/9ub8i753fdz9w6pv+cXN3bgPe7uqt37Hqi/615tzJ0dVRw9VHd6sOrxx3ufv/MnN2L/6i+fHvvc3N3ccn/bojT6//b9Uvdnvrvqyf1f1wKM2L8z4gr9WVbt3iBsj71+MXKPhk94u7e3tLXp+ZG9vb5psxVhamBJEBAwuFMfwcTWvK4CUXljgbX0uOp74Iwhqbg8Y/6624iEBX7p4mI23LMuFasLMKf2Cgo4v/pUYtChgsLqePcj84oT8C4B8kcHAZYVv5szLsnmjnp5j8kAmX15dTW15oeUXe6yQJP0mO6taeXh4eOEhTK+eJr7TXMme9PMbN26cPY9FOysW6Wdc0Om7+NEcrHT5Vk0lLfqB5FSi5++RccHkPkRiHHiskQ/GnPPu/uxtkt1SwvYFIPXkfj/zB9HMV/Q35USX0/tzAe7Ax2NruYAyuTzkMR2jTEdHRz+5LMu7XhC8KVJjZGPk7WDkyVJ19H2/tblQ2hzMn88mqKrn/M+bi6Sv+emqt37HTbuqzQXE6UnVDxxtjmlRuSwYc/vfP35u1Td99uZlGe/zIZu7UBqHMo1xfpzjTPkbtbmq2p4b+DvG7tgzGrXhZzndDgUeZuS8uew+hviYHecYPv6oOnjB8+vGN3/Otllj5P2KkdvCTDTotW0pZIVHzHBB50IlBVxmJG/LeZKzaO5kCAJburJVMGhB7TL4lgbnW+05d5rP5XIi4OpfuihQW1/Aq+LmthARSJgklBw84D1QEw/Op8/p+p4FpOY7PDzcufPE56Jc97QrK46sBM0CTDyPMc5eUOFz+gWcAHhvb+/sooTfdceNOqadXHavROsCU9sUJeNDDz101scTjD/bpTb+Q6ca0+1O/XEOr5oxFhgrrKalv253T6Yif7uXX0yzb4qtZC9997cuug9TJ7JVujj2vsmmbOPxk6pia77tdvHtrB4DfswXg2netOB20G66fWqMbIzUfGsYubn+sUX/7HNV1ed82OZlFfsHVX/tD1d9889vnrd6/ydsJ9yvevrjqr7vN3fH4Dgv/JKqFzx/M8Y/eObmLX7v/n5U8gW9x3ESf+dSnZ9jm9nYcb69qqumoMTb2vEZH1eVfW+/lr2DqlF1OkbV2Ob2cVx7Y9Te1l+Oj49r7KM4vLcUvXrU2F6bLjW2dVC1c7zY29/eadqOv7csmzukY1TtbS8Eauuby1JjOW2MfJgxco3u+g7XGGM5PDw8Yzqc30n2M7DwhfDawioBEft4P5Icyd9SwvNOHD/d5uR8lyk9GUbj0tFZBfXKo+aRIxEMpG9WDNkmJXo6tTuqHFCLc/5uE0FS310/qbqpMQiKiSSjXpfLu1sE9VkVQ3+TzxEcKTftwAVI8q0E9K6TlCyUxKgb+isvmtL2S/qGLwyks52kHCpeTGYug+uA/jR7zk5jzBKp2yHpi+2r6uyOZprL284WiilnUK4ZeVJ2v/ZFLWOXoJNyhuZOdxX1L1XwWGigv6ZcRzuqD3NAyrWuY7+TovO3bt3qO1y3QY2RjZG3hZF7e/X678bFUVPTNdHBV31S7f+zL22MrIcPI7dbGB+eO1wuPI9LaF+4UWne3hepHNf7pkWa33ZVOxqUlawZuPlCjsk8LRQT6FWdG0QVAb9oSJU87sf1Rbk7KOU9OjqqGzdu1MHBQd26desCT6J094Myux0FbjzvD3gncKWu0yLBb0NT39y3rwqjPu/t7V24+yE+6Wv865+p09kWEM3LbXokXySlhOX+oXjwB0jZxoOYCwza339HRhembu9094q64rNfvKDjAkl86OFf34YjmdPD4/RX+sasCi2biD//nbCZ3i47n+y1Rs4X77AxhqcLJ+jF8xp9Ly14XWfJj8kD27iuuRVqTW4WIBS7Kcc23R41RjZGsu+lGKm3ZixL1WtffWG+s3ntVs/ufZI6+/mtq9BV2nobfRcX/Lzb6qpkI6wNvJz9V1XjguzO77
hwxhu5ZBt9jtBbmj4/t1zQwxsVLbX5CYAH36SW092ihKgx8g2Dkde2pVAkYVOVi7fueWtf/SSQHvL3BagvaF1AGlf/Dg4O4laomWPQOWZ3DDgXQdLPc4xl2X0wkPLy4kHHEn/sJx3RgV2n0h2f9aFTUQ53VjoV+yYg1zzkL/Gs77qY8gUAx0gAd3p6erbg98AiOLqd04Udx9SFgj4zkGc/ckm51I7+R/3xmMbz5wv82QVd9GgBQt50XMf8rYiMGz3ozaRSVWdbJqnvBP6ygVd7nd9ZH/qXvvOCzv2EvqW/3NLgvuHH/M7drB9tQ14oC2VIdwTF22VvXyKvJG05TdtcpSe/KE15jzwzDtbaej/ZlPqRPunLfdF199QYeX6eYzRGbnWyt+Xt9a+tB//S776Q9yWH46jePMvfaeQFfMpRHM8v9t1WfmfCL3STr3Icx8N0jHn7djGS5+TXaUs+5XOM5Dl/dMHjhrL5hQr1RL917PS1CfGG38kDY4U2SHyd8fuBH1snf+NLdo41Rr7hMfJaLrg4uSdRMuuM+UJJ3/XyAU/Svtjz+R0QWK3nlSiTjfgT8RkU3mZMbUV0sJSsXE7XiTu/VwHc8T3ZiUfqiHO6bjyx+jk6jS+O02LXkwLlFqXqLgM/BdIY42x+PcfFJOXJW8DMwEjVkZQ4HJQI8rOk4/ojMGvhVLUBDgE/jwk03VYM6mXZbFERmJ6entatW7fO9OZ80xas5mhebm1yHbgf8a1c9FfqNPk7+3sl3oHW558lbbaZgUoCRI8VzwEpd5FH8cFq+VWTtN9J0DGNq22yKbacJz9HWWfFBC5S5UfpItt1SaI/Jzs3XY0aIxsjr4yRp7t2a4xsjOT8d4KRPm9j5IYeDoxcw8mH5Q6XBPTJkwN5whHzM5pdzTKpucKrLiqJQeeGEv+s6Pj52RU7P7vRE8ixHSsBKegcBD0gWPnhOExUbOfnNLYSORMpkxb50zisOLpePQG7A3vydZ3zWR5PvOSBY7h8budkd7ajbj2QUuALBMbYbIF74IEHzsbTGxbVxsFFMno1zWOBOhegHB0dnb3qXYmeiwVPpAIwycRKmgOjqoP6y7loD/pESjzUUYpbkceE54rkV37nbWZ7XzBRxx4nbO+xO+vjC0+ku69/AAAgAElEQVQCottuVr30efWd47A/ZfDvugviVWj1T4tjl9X58vZNd0aNkY2Rl2GkU2NkY2TVnWPkSfCpxsiHByPX6K4vuDQJEwCFcod1B/Ereo2VBEsOlZTtvGluApzzq/Z0VBotAVwCKJ5zuQmWDgisSnmfBI6cw/W+t7d3tqdcSUbj+auBUxCx4pKSgAdzqrA6WHu7VN1Nr+f1Sp2/Wc/14qCncSgL5+RcmkO6c9B1P5Oe9fyUwOHmzZtnn9VGcghYpGMtWPTd3/aVZBtj1IMPPnjBf1ipJu9rFVh/OJ52pEzS59HR0Vk18fT0dOe7qkN88DTpmuQ+Qh/yRVnydX52u1M3M3/wsXjMFzWMI9nf50u5yGVPoOWLZJHHIiktNGa+yr5cOLhO0iKDczkPTZdTY+Q5NUZeHSNJjZGNkXeLkZsDF5+zkj4aI68HI9foWl6aQSY92biyXeFuGJJXsjhf6ucKYPB4EtZfnqMzM+kzuDknZVlzVg+Kmf7W9Mj2qfLIfrxNzXEcuBN4UibKxmoQdZbAiHrSX38OZ2ZT6kjJjmBC2yR9E6hdBudV5LeVPWkxoAQOAoX9/f26ceNGjbHZ5sOqHKt2XqWj/8yASvyICBI8xgUEZaCNCES8RS/Q0GduMyGp/c2bN+vGjRtnutQLN05OTs4qe69//et3tnfQZm5zki+aqCf3e9pU+koAxrESqM2+r+UMt5PnOB5PY3lecFl8oeztkm9W7VblvJ0vCF1HpKsAR9PVqTGyMfJ2MdKpMbIx0u1/Oxg586nGyDcsRl7blsLZ1SeZdePLQej4VbtVCncqUTKIxvKKD9tpvLXFG+d1ct7JjxuPxAqNJ2IFGpO+J9jZ55nzpnGczxkQUvfk2ykFp7fjGA4Ivh1lBpQEMY2VXlnMIHAfcP9Mi560cCB/AgIByMHBQR0eHp6BBrch6DN/9Ljq4gtDxKsvVFISlEwaj3HhIEs7eNWaPPDhe26zSbbwxQ3B9caNG2eg+qhHPeqsmnfr1q2zSp8Ahtsi6CcpefP7LHZmY/D82lshExjxvM/nVbyrgJQoLW6cD8+JyQbk0V8okPqs6c1lvWq+bbo9aoxsjExyX8DI/d0XRzRGNkaKl7vFyFHzLcuNkdeDkWt0bRdcXEin25oUSMbwqo6USKOlwBexLYmK9IpUApdZotQcHNcTkDtUOn+ZIZJTs9+sclZ1vh+WuksA6/pzZ3cAXwswH29WlaGOuWXAtyGwrQOAfIRjqWIkfSgZeuUu+WICX77tiCS/EWAIIPb3988qdTzHyh33mxPs9AaplJgT0Yfox6mKtyzL2Rt93DYzvUjPBBUHV/VJMe4VQ4138+bNeuCBB862U6i6d3R0VLdu3dqp7DkYyq6eBFMCpg59EeJ5gMdSrPpCjfP7fLSJt+M4s9y19p12TgDrfPO32hwAGFcph8xA0PPBZcDbdDk1RjZGcnzOQYw8wbCNkY2R14qR21fdN0bu8in/uA6MXKNrueBaS8ZM4p6oPBHPFOFjUkBXjPfzpK7vXtlKVSpPPAywFPwOADw+q5SpbXLaJHdKBGOMnVebul45TkoIHJ8An+zHBMO/qdLp8q2Bb+rH755UOS+rWklf4iEtPEge/Pv7+3Xz5s2zv16p01+9HYpJ0HWl6hG3JiSf8O+6QJ0lyJk89N3L9LqW1GaLLq8ySkb54eHh4dnxmzdv1snJyQ7oHh8fX3iQWePSngS2mbyMi5kcPMY48TZpEeV5YvYGuBnN8tjsePo+m2+2YE19Z/pIx9h/zc+arkaNkeftGyMv8r2LkVlfqR+/N0Y2Rl4lV/N8Y+Ru34cbI6/ttfBiwhWUvsugnnDpnDQujTcDHPZPwTfjh2P5ea+yeNB7X8rnPCbZOGcCSB+LgOcJicmLOmDVIt0GVXsFsctAnlzPlHEGximhs58DgficAT2BTrfnvdJAvc8AJOlvjHFWkavavB72wQcfPKvg6Z+2SrCyRz1ybsqnql7yB7cJFweevJTQPDFSDk+WKSHSJgQ+6WT22xfO/97e3tnDwBybthMPBJPDw8OzbRUClhRvlF8+QJD0O6aXJXe15V9RinPGEXVAvXJ+n2OWA1IuY6zwOOea5TAfK/F1O2OR56vqtSlTY2Rj5FUxcm9/f6dfY2Rj5N1i5Dlzu363Ro2Rd4aRa3RtWwrlNK6wFOSeqHScfZ14m5iJ0RO+b8Fwg3F8T67kOSVtn1Nz6ZzfXqbT+IOW1FWqiFEeOrLf9qUMnPPk5OQsKaZtFNTrzGYCfCVLfXZ+Lwsg8SEdcPuD610ye5WLiUsy7e/v19HR0Y5+OR7H9YTs8vIBXwGKgMPBhFU832Kg79Kt/IN7iGeJTvzNEgn9IP2mDG3KcTQ3v+s87Z/8whcw7n+UVe24KOH2DbVnl
ZM6Pzo6qtPT0wu/L0M5KR/H9O8JJPzzDBBmCTct/pwcPD2e1uImLUI5L4+tLZTczzyPpIUh4+EyP226M2qMbIwkX1OMHPm5uMbIxsg7xcizNjUv/DRGPvwYea2/wyUnmN1OdONU5QdlPTDo1OpPgdme7ZRwWN2RQdN8XoHysZksCCqemJIc6u8A5m0INvruCdrbOyB5wLqTSg+udwK1SD86KNlVqeF4/Os8+DnakiBFmdlGPOp3LjTeyclJ3bp1awfImbwdOLgHXef1WcBw48aNnYd9b968eXaLX6+x5VuVpJuq8wTtCyTagrZPt6CZmBOgCMxpe7c79Uy/ZiKWnsTTrVu3dt4MpTc0cX4CO6t6GttlYvXYEyr14tVT2U/VQC2MpJOUgFNSZ6xRJ74AJH+0wwwAOL4nY0/2GivZmvaajc04neWMJPvMr/h5toBMvLuMTXdOjZGNkZdh5OlyftGgbWeNkbu6bIy8PYx0/elcY+TF73eLkWu0esE1xniPqnrJsiyvuKRdTLxOzpgb3BVPYdOYPpaTnIVJKhmbx2f8e9XCgz0p2xMBA43Jx3n2ZDDjiXwQBKs2IMDAdfnZl4DB+dMFEYNrxht1qjFUxeGtbo6rW+jiIemSSVby6fWr+qy3/TB5qL/G1F5pgYP2mOstQgIPVe4IKuJD9lsLMso6WzzMwJiy6jMfJvaErr4eDz6+qnhMJgSolLQIFvxHHyYAqS1lIdgQ2Jks3e6q5Gkc34aR9Oix6Hr0pD0jgoXnKR+H7al7LWTT4oF+kfIm7ZCqcbM5Pa/R7ul8IvcrzpvennU/U2PkLu883hi5S1fFSOa2xsjGyOvAyDPfWxojH26MXNPZ6gXXsiwvWjuPdjtGI8OerF3Zs3M+DhVHR3HHSoEkQ84CzJO3VwBdVlbM5OyetGbA5T9c6M7KsRSw7hju3CkBsPoxAyV3EiYAJa61Koy+swqYbMTXnKqtxhYAVJ2DmsZiNXiMcQYgriPJJLBK+5fFl8bSQ757e3t148aNeuCBB+rg4GDnsyp1euBXAETfk/4kp87pOPVH4mtnkz9wDq/SVe3Gjj7TF2lT8uI+oPkfeuihs2TNrSds61VFgjYBSW9zEj9aSLA/F+60O2NVf/mgsfuYX8Crvcc+5fB4SsmTunZfEjmfM53RpjzvzyrQHs6z5vNj/O650+NS/Lj8nDeBxUyGpsZIfXZZGyPvAiPxWvjGyMZIt/udYKS0crqcViFm5B+NkW8YjLzWLYVkzBlIxvHkyc9Vu0b14E1tHCQUhB4E+qu+npiTI9IocgS/5Ur52cfnIm8Mek8Ya07s8mtsJTVWQtYSvwLv+Pg4JhONq0QuHn1LTAI3T7DLstmrrm0Oquwx+aovgUxyPfTQQzuAoGTCoDw+Pj5LTn6RN8Y4q87p7Uk3btyoBx988GzP+Y0bN872oN+8eXNHZwQSjus+xy064o3APgMM2pRJlQsYJVYnrw75AoJ2TQDDyuDR0dGFhQh9X33VTm1YnZXN5AOerGVD394i33rwwQfr1q1bF/hVG5G23NCXZxXm2RiMUfcb2ojtNZbrhDFPvlP+Sjx6vvHj5Nf7M4/yeYhUbSSvzgurqaR0rOnOqDGyMXKOkTfqdds+ym+NkY2Rd4ORx2PUaVXt7+3XwVaOxsiHByOTX4mu7S2FnrQTE1T0DGicvG3VbnVNbbwPqzo+P4+pPZM9kyDHk3weYMmwLteaMRmQPOeVQY3j35P8+sukQL4V0L5/n3vQqYfZPBzX+fO/h4eHdXR0dNbOq0nuP+KPYMDP5EEXcqq6qSJIYBNQ7O3tnQHIAw88cLYvnSAivfGHGj0puA/RLvIjbSMQqAhsqFfqk4swJh9/sJZyafxUxdbxVNHc2zvfpiJdayuJ+3VKIrJXAgRuc0j+JL64MOCiRfF2cHBQR0dHZ8e5eKCc7pMeJ+JP86dFoo5Jp7PKH4l65wIt5cEE0PqbYnqNh8sWqtQJt9b44lVjc6HhY3HbR9OdUWNkY2TiL2Iknv9qjGyMvA6MPNNJZcwQD42RDy9GXssFFwNATEmAlMzpKLpCZyJhRWxmRCpIbVxo9fFKiN+m5Lxqn+Zk0tUc6RZr6jcDPzqMgyb15Qnb5fPqAC9o6LTJIfxhWX/g18HZg1RjcGwlJ/U5Ojq6oDcfi3pwgK3a/FaF/IU860cD1Ye+oYR548aNs33m2oeuPejcl672SqhMsvIX+af0xcTLc1Wb1/IqOYpfJkTy7VtcaEdP2JqH/u5VGtnNQZug7PvKE6iLB4LcWkV4jHEGAF695Zj8S73IF/whZfkVeUmxkRK2+qc40nz0XS5YCG6kFK8+VuLN56YcvuhLRL1zPsaPLx5muk/PjfjieLYYabo6NUY2Rl4ZIzGvMMH12xjZGNkY+cjDyGv9HS4JQXLlMPB0t4NXzd7HydvKEL7Xne0dqDgO50vGc8MpWXFMzqFxFchMvu4kvIL2hyTJK48zUDQWK3DkWXMkINWYBB1Vcvgr9QpkypWCUH/1I4gnJydnD+dW1dmDuxpDiZtys9JUdb5vmZVTBbrulp2cnJw9uCveuajZ39/f2XvOH2nUVgnxot8s4dxcqNDGBDvZybdG6Dv9ixUvT1rSmydezV11EbRJBPzkiwJZgpkvFCiTdCkefRuBJxlfVPqC0B+QTn7Eaie/J/BM41Be+jnlpE8xnmlvH1f8+NgpLryK5rnM49NlSbnPF9i+UHS9emXOn6Ok3imT88W9/t636erUGNkYeXWMPOf98PCwMbIxcof3u8LIpXZkaoy8foxco2u7w8Xqho5ROVK2J1SeS4lSbXwsnqciRepDB3JQm13dktKtQipY5/w2rPOS5qCOKINX+pgcEs9MTNo24HqjbhmsStDan1x1XnGiAztYkV/plr8/4lUj6UcAIlkeeuihszln82hMBgMrPQqcMcZO5VFbH1i103dV6x544IHa29vb2Rah30Shvv9/9t6lR5LkOts87nG/ZURmZXdXsSlSoogP2gjiguvB7PUL9C8G80Pmh4y2Wmg9m5GWI0H4BAgCRVHdXd1VeY27x8VnEfmceP20R1ZmVRaJTzQDEhnh4W5udm6v+WvHzBUwNHDEgF2nt2jHtFX1gEy4tz6Mal15ftgRilz9GPSi3REs6+QTB3c6+FHgoz1Rd9iO1olNqVzo36nBHrbDix6zLKu8O0aZPAagMZjG/sc4ooBSF4O0Tm1brDfGHJWjxpqotzjwqLtPBAGtQ9Nr1N7rdBivV+BUm4gxO8YH6mOQpYPDVJ5XEkYmjHwyRnY69CxhZMLIhJH2vxZGPlZebIZLHS86WF1g1SdcPTeeV2d8MQBHkMBB42/arnhNPE+dgOM6/V8HZKecOoLkqXvVteNUUePWa3A2M6ts/6rOxOLXzWZTCWaa021mHtjoO9dTp6ZEaLtUPxoIcAp1Dt0uXtupzqkBnTZst1s3ctpOwAOAYOpg6/r9vvV6PWfsyFfPsswZQGW1YD2i7tSuCfpqlzHnX1mXaEfIpQ6o1Ue0Lk0zoZ3K0iqDrv7HvZke1yCjdlM3
MIr9QHcRUBQwVX91cSCCV91grw7UtG8xiNYNdPT3GDi1nIoVMUBzb7VJ/ayDO+qru2dsT53t151fFxN1sFk3MI7xKIJWPFftS+NCnJ1J5WklYWTCSG3XhzDy4UxfL5QwMmHkJ2Ek+snqyYCEkS+HkXV1U170gUuFVjc1x7lm9Qar50Tj0rrUsfT+Kux4bTxHj+l5UcAoJ6YkxDZovWZWa/x6LxwwBqpTgBIBgfNVJtpXsyrTpPdR5zKzygsTtc/oSHVFnQTeyALo1D5BHXvQoMAWqQT1oih+1FdNUVCmE5DT9qnRdzoda7Vafn2v17N+v2/9ft86nY51u11PoWARMICiOuHefCcA056oF5V1nf2pbdTpCj3EoK160yCg91TgiQMcBToNznHAhG0ha02XqGO91I7wDwVkBiz8HtuiclWfUMDVQUZkj7QdsU49Tt/0f6wjXlMnJ60/zk7UAVAM7Kf0GW2Mcgo4om4ViJBlnQ1G2cf2ah+jHdXJLpWnl4SRCSOfipG7XXUgmTAyYeRLYKSZWbmvxzv6pv9jHfGahJHPx8gX3RYewdSxoHUBLXZWGSI9JwZmFQaGFwWldT729B1BiGPartgmSp1w9ZrYX47jGHWGFsFXr8F59bzYbhyXBa2AQl0OOywOMoxORNE88izLrNvt+vUAhtlxy1P6BrumgGJ2ZH1UD6RrKLuU57kzjBo44kBFA46+gJF0iX6/b91u1/93u93KTkv6ssZOp1OxDb0fhfbpfdGNsij0Txddqo3WBe0YFKKdxQFIXZCNQUxtTtuljBYsZGTa1Df0/Lo8efqw3W6t0+n8aLGxyqnONxgUcH/0gE1oPIifI6tH25XVfMy/o6wi28j5detgYtyJ/Yzsahz41Q0oI6OppS4man+i/8Y+RH1RZ0yF0TSbeG0qH1cSRiaM/BBGRpkkjEwY+RIYaWaW5T8mQRJGVsvnxMhPfuCKN4hGEqeSzawSgDQoqXHUCZsAcwoYtA3R2FAo56tS6wCA43WKVeOnP9GgIjhof+K91Di0/VHOMchp2/R8DTD0VQ1FnRPQ0XrViLiG93JEQCN45PkhVYEAEgOe5pPrAJm+60BC26m6J+2BNBCOmZkDA6kgnU7Her2e56R3u13/T7pEo9Hw7W5VDnERJPeiYEtqq/zpsQg2cfATbTMGPtqgKTs6kCIAxEFWtBMNajGYK9BooNT+1gXOeA6FHHNtJ23kmlMDIwWPOLDQwVxM3YmDjRic1Z8i0MZ+xd/1e51fkvKjRZkvlZ/GnVNxQevXeFcXP/Q8jU1Zlnmb4kCoThZxYKDAXQfCqTyvJIxMGPksjMyqG4EkjEwY+akYmee5KRIkjPzDYOQnP3DFjsYbx6BCUaOKxhSv0XrV4fTcqIRTDsmT6SnB1PUlDuJgYyKo1BlxlFXdZ2QQnT+2J16rDEMETwVUfUt5PB5nn2IbCeplWVpRFN4+8r75rkEtAj4Ao/cgRSH2nWChU+4apAES9LjfHxcsAgoKfu12uwIkyu7pCx5VPtp/DXr8VwdVO1QAj31iVg85cB9lWgBdPmtdeg5/yEP9QXWiYK3BGFnRbvVFtUe1MfWHyOqojuNgkHoAqxgctd8MONQH4kCQ3yLwnAq2sZ1RDqpr9Y1Yr+obv4iBOgK9tlfbpHGwDqDi4KMuxsUBZV3spJ0aE3SwEeurk0es90OAksqPS8LIhJHPwsjsQW9WfSlwwsiEkR+LkcfVZwkj/5AY+WIphdp5sx8/9aqyYoPjtVqnsjkRuPT6yCxRVJh8N6susqQ+VU68LrY59jveUx2K6+rOj8ZSZzhcX2d00VFUHrpdaAwOFKbq6XdsM9cqK6cAk2WHvG7qwsF0NosAr30muNCmOgOv03XUG+1kITDgwZ/mo7MAWF/iqECr9akeNCDqb3VBsSzLCnulQKhBUfsY+xl1oXKPQBbrjIwY1+pLp/W4BhwK4Kqgp/KBVdO6Go2GbTYbt5eYVx4HS9Gfox9GcKtrdwQW1U1kmOOAKfqo6rkuzuj9qV/jTvRbBfJoJ1Gvdf3UojZfFyNjKcvTm1tEZjfqV+UaGfW6WJfK00vCyOo9E0aewEiYfas+yCSMTBip8ngORpbS9oSRnxcjHysv+sClzoByTjFlOgDXTuh3rSsqQO9r9mMGjWtxCAWbOkZcr/1Q+dDWj48JXRWIUz12XZ3D1TGYmnKiIEy9ek2Uqd5H/7Isq+TRx0AedaEpFRpIMEymcKlT86XR336/9wDPd7NqcFTZESi73a6nUwAomo/OVreACgwewS/eLwY32sBxGEZsSXWpwVaBUN+foWlE1B2ZV+pSue9rAqfKkOvr9B8HFNqOGLw3m02ljmh7dcFfWVDOiywidWk/aLOuqdA2K1MX9RR9Q2VTp8MYVyJTpnVzfhy4qgzjvevqODUAVp/V8yLo1V2rJYJClHUscRZF/V3PqYsPcTCUytNLwsiqLB777Y8aI6W/CSMTRqqMPhYj67wtYeTnwcjHYtuLvYdLZyxUOHUDobqgFQOkCo/vlFivOmQ89zGDitfXtb1OwBxXR4wlGmhsR+wXdUYwPCVD/a+BQhd1KnOBc34IPDWYb7dbf/eDyhZA1r6fAhllDmmfnqOMFAFF8301tzkCiqY96EsayUEfDod+XFk9WDvuQ/11LJjaKSkPkVnTzwp+cVClwZx+q35i8FcGlN9jW9XOog1pXTqg0mP7/d4XY9MeZXSpo64d9AXw0bZhe7rIty4YRp9H3+SlRz/Rdj4WYGPsiAyl9ieWU21UuZ4CzSiDeL1eF2cStN4oF9Vzna7VBnX74RhPYh9OxT69XgGvLmak8uGSMDJh5HMxUn9LGJkwkvo/BiPzLLO9mWX2Ywzj2oSRnx8jX2yGSwNt3U1PKY3/dQBRJ4x4vgLJKYD40P3rjtUxa6cA6RRgqVyoMxq4GldsawwMsT/xCVuL1h37F9uo9zQz6/f7VhSFb1eKI8J26W5CWmfcvED7EGcklNHRa5WBjAENsKFNpD6wm1K73bbBYODvERkMBg4kAA7XkDJBG+qClAZ7Dci6AYhZdecg5AObRWCOtvShAYK2S0FJjyPHOhtQ/4k2GAO7LrrlexyEaD1al4IL/3UNgfpBtIs624vBjv+6iF3tKvb/Qz6qdcdgWqeHCPKR7Ytpsafacyq2RaYwxrb4Wa+NvmZWXYejJcaz2I5T8TH+/hiYpPJ4SRiZMPIpGHncubu6TiZhZMLIePypGLnnmqxaX8LIY3kpjHysvNh7uPRJlIY8Jkg+x2OUuinWuvvGe0VAgwWpA5dTiqkzpDpwjM4WFa/lMfA55VgRUGNwjfepq6suQMc2qAz1v077R5atTmdRXvo5z3Nbr9dmVt3elrqUXYnyRI9lWTrzZmb+npA8zysAoe8OUQDhOAAEMO12x5dCok+YGO6rbYsyjzKokzNBR1+Qp/dT0KDEAcZ+vz/5no264HKqbVFfcYcn1XW0CUq0M/qi7Jv6Rl3R3yPw1fkgcqzLM9dUkbo+1+nt1D10QKnH6nyHYxFE6mQW/T36i4L0qXIKSPR/9Pd
T8bbuuPYxpmCd0mMqHy4JIxNG1smrDiPLh10KM7NKuxJGJoz8WIw8da72KWHk58fIF30PlxoepU4J+rmOtXrsOj1PjV9/ix2PqRgaoPTaCEpaZzTy2N/ouKf6FJ3rFFhEY4gyiEAb7183PR3/n7p3URS+qLUucGZZZp1Ox8yqufrqWLFeZK1MmU7fRzkRKLRuDbqx/8o8KQACKuTOAxK6SJnvyE3voTamMtXFsqeCVN3v2u6o37rjWmfUaV0bsyyrXT+BbKLOYzBT+6zbYSjaWbT/2Hb0GFNJo+9Gu499rNuNUG2rrmhfTvlKXYA8FXfqmDltr9rTqT5pe7Rv2p86f9b66uSm8SnGtThI07Y/9pnvyhQ/BnSpfLgkjKyXidb/R4+RnGvVOJcwMmGk1vccjPQ2BRdMGHmUQ5THx2LkqThn9oJruD4UCGMjomHqlKyew/+6IK4CoA1xyr4OwLTdj/0WnSTeXxVad5+6Ptcdr7v/KcDUa1W5BP/IdkbHUCPW4+goz3Pfqef8/Nyur69rQa3X63ngWq1Wld/rgiC6UceKjIrZcVGrOpiZ/ejaaD9xZyJ1HMBC66hjjXRgw0Mh56rMVBd1wVWDZzwv2qfaUtQt99Vtf6MOqSPuiBQDdgxSdcE1BjT1gTqwiz7BeXGRcYwBKke1S87RhcrRPlU2db/V+Zzadjz/Q9/rZBb1rf81kMdBgraHPsQUhrqYRp8/1I4Yl2JRu43xTeup01fduak8vSSMTBj5ZIyUdTYJIxNGvgRGakkY+YfDyBd54FLHpLF1ASsGXbMf57XT0br3GMT6Yp11xlYXVKNR6++x3aecUa/RPkcl1QHOqXrqFBs/xyDC/Tqdju+aw2/KwEXjrJOd3m+z2Xi6AaABs0XqQ2T4uF5zifU33YIXwNJgzvRslGcMKpwLG6cLgzWdYrvd+hvdYe3UUTQNLQKm6pHPKtv4vc7BdVGr7jAVByLx4YHfdH2Atg0dRKCLM4fRTuqCfGyrsmR1dnlqUKL915xtvU5lTx8jM6Q2hlx0MFL30lBKBD8F9VPsu7atrr8awOOATtcoRNDU79wj+kOUnQJCXTzVcup+dXGK/ut1dTp9DGQ+xNyl8nhJGJkwUtv4KEZ6XKnOuCSMTBj5sRjpssura6ASRh7LS2Fk3diE8skPXDEIo/wYxPmMUZxi67g+vtwtKrJOOfE+MYByTd20Yd158ZpTCqkDOT1WF2iiPOoMjXbFp3/9zG/L5dKyLHOHi307taON9kX1ws5LZXlkjzRwFkVhRVGYWTXViBdeaps1tcDs+KZ11TFAyFayql9kT4oD7xNBRiwGZutbficw7ff7ysJcgESDAX3P87zy3o4YEFRv8fOp3wiGdXJRG4m60fZpO9EJYFhnI3qvyHbFQQRtUNaJe+luPtHe0eA/0oQAACAASURBVAG7r8X0Cr139K8oqwgQ2CM2puCv+tJ+xT7Guk+t84qgF48zENFztD9aZwSluKbjlL60nacGkSpTBZDYBq5jcPYUplDbWGf7dd9TeVpJGJkw8lkY6btmZJWZm4SRCSM/FiON6+QZIWHk58HIx8qLruGqazDCq2ucCkR/U8HVBb+oWI5rPfFN91oiGxLvGdkU/c9n+kodui1sFLoac/ytDsRwJJVRXRvUMHe7nbNWdWCY53nlfSF1hqJ96vf7tl6vnQGL8iaIw+RR4rbHnU7Hdrudp1TQ58iIKstVlqXXTTBBp5TBYODvFOHhDRDS93/sdjtbr9fORsZ3i9BfHbToAJ7jEZijHdEHfov56xqg+T0CkTJadUFP26GgX2evcfClJQY76jA7vu0+9iHajMoJuWCLGjT13Bj8VPYxOHJ/zon57U8p8Vz1tRjQY50RIGJ7uJb66mKftuEpA1baRb0aU+qK6lF1GHWuA8HYN21njA3RPulnKh9fEkYmjPwQRsqjScLIhJEvipFlWMSVMPJ43kthZJ09UT75gSsaSZ2C6pg6ijpOnJLTTsU69bM6mBpp3K1Fgy+Fe6oiIsDUgY46UxSwGltk/OJ56oDqqBFo+a5MmgIUQQ6WK74xvQ7IYlsUyJAV9bbbbQeWVqtlw+HQdzQaDAa2WCysKArb7XbW7/dttVrZZrOxTqfj12mb1MCz7PBiRrNjkN1sNp4O0Wg0HDDYuhYQ0YW+KlP9HVAgcOqAQ5mZGCCj7akeY8BUXW82G/9dA2U8ho3UgUe0eXRPH7bbbe1ufcq86EBD7ardbjuj2Wq1almxOtuI/gkYab8i46cyi+CrMtN6NIhFVlD7pTpStvYU4Kje4gA26jh+xy/UX7XOeF4MxBEste3aB87V+KH6OcXAxbYQC+JAVPt9CijUl+rsMJXnlYSRCSM/BiOj7hNGJoz8WIw0kVXCyM+LkY+VF5vhUuHFxhJcNW81CloHTFqHGqSyAHotRYUXn2SjoUZWINYRnTv2R+8RDVzbX8c6aF+j48JA1U09x37VgRwOrkyQtq9OJtqHeJ3e69WrVzafz83ssCCYoN3pdGw4HNr9/b3NZjMrisK3ns2yzLrdrs3nc9vv987AFUVheZ5br9ezVqtlvV7P1uu1zWazylqv4XDoQEJaBEChoDAcDq3X61m/33fGkKDGNvQ4KYCr98FpsdMYOOvSQHSApIP9GExU1/xpAFbmkjpjnrgCnv7XFzDGQK3BIfqM6lbboX4RfZI+85/24tcx0OkAqI7xVBvT64ui8N+LoqjITQNdZOejPvT4qbjxnAeKOn+NJcaJukEh/YixC+aUc+vAQIsypiobjbXR1yOrq21Qm1OWOsoxlY8rCSMTRj4HI80Og8KEkQkjKQkj/9fGyBd54NKnUApGqU+hkZWOwZBjUdCqhGgYUTiUU/dSgUZj1nr0eDxWd09VTgzwauTadpgn3kCOQaBQ3a0nBjeVDbLWN5m32+1KUKC/sQ5yuWHeaENRFNbtdm0ymTjrdnZ2Zo1Gw9brtS0WC2u1WjaZTKzb7dpsNrNut2tmh1x53umxXC6t0+lUGEUAYDAY2HA4rNhMv9/3lIbRaFQJWOSgNxoNz0MfjUY2Go2ckaLvb968sV/+8pd2c3Nj//RP/1RhhzQQoQ/uo7pSHWrbI0jU2YCZOVipDutYKu4XmbM4aMEeFAT1GPeONhPvmWXHBd70GQBVljemdKicon9qXTCmCkjRP+hTzBnnemV6WUugwU0/q13XxRbVSQTDCIBaYlzTgB11rT6vwVwLdemCbWVYY2xT2aNPBSjaFO/NZ7Ur5BkHHFpiXdEPHgPaVB4vCSMTRj4dIx9ilGU2Go0SRiaM/GSMVM9PGPmHw8gXe+DSTtZ1Ts9VRik2Ujsag7KCTAQcDLgu+GKIUVkUzq1z5DohxoCsCjoV7NXItP+9Xs8DtLIg0fm1XmVZVKaq/F6vZ4vFonIdsuB8AKvT6ThTwq5K3W7Xut2uDYdDB4Tdbuegs1gsrNPp2GKxsKurK8uyzNMTCHKz2cyyLLPFYmHn5+fWbrft7u7Oer2e7fd7u7y89Bz4fr/vQaPb7Vqv1zOzQ9DgRYwwd4PBwM7OzhykcM
qjzPMzaU+0Vig7SIgtU3b97o66+/Nh03IBAMHgt0qSeAzZFk907XKPxTkrHpodDjsFJ8iQTRZev7/b5ub2/V6/XMD/BPYgRBnXoIwHI+n5s8BjA9Pz9XPp+3gmW3i9doNDLmOJvN6uuvv9Z0OtX19bXZADIVZp7AHN/f32u9Xpu0CCa+0+lIkg2c9DxPzWbT/PvNmzcqFAqSjgwYrXeRE8GSuh208AEkWN1u1yQXAAn1FIAKCZord2DNiWuu9IE4QTwOhUI2y4X/xk/ZQyQ7brEw/x4KhTQYDPwXrg+4fIz0MdLHSB8jfYz8ODByNpv9771w/fM///MXbCgMCawQgZ+3Q96sYY94WI5DM5mMMVO8XfNwBCccmOCx3+8NkFzQgenbbrfWwjQYDNpCo0lnc0KhkLF1FCkTzFKplE3ZpnsTAQ4mgntGDw57RxEtDpPP55VKpSwYwcJwvAozlMvlbE1gABOJhEkCQqGQBX3WAHYHMCQIYWi0U6UWAO04xk7A5YiceQuVSsX07ExsDwQCdryLRh9mgn2BWUDTDuvXaDTUbDb1+eefKxR6nJ2Bg1B/AOBSFMt6cxT85MkTffXVVxoMBnrx4oU5RTD4OBOGQlrkONgSTsVaNBoN25to9DinhWC2Xq9Nv45tkQAsl0uVy2X7u2KxqEAgoPPzc3333Xe2vyQnJEp8dywWM3uZzWbGsNHOtVar6fr62hhhZADlctnAcLc7FhWz19Pp1EA7Go3a997e3podUQOCjAV5TbvdNhnDdru1I3YXyAEVjvUBuGKxaEweQxmJBYBXLBZTo9Ew5hRWmELpRqOhdDqt3/zmN6b37/f7xrJiF9RAvHv3zmoQYKhZJ+nYvvjVq1d6/vy5MW0vXrwwZm21OrZ7rtVq1q0Ilpd7XywWarVaZisAKcw9a0hgpwuWm4DwfdglJwG73c7kF9QwYH8UmzcaDWO7+T2YcgAdm3flX7DjxDvshPoEfAVfZZ8AEPwM4EF7779wfdjlY6SPkT5G+hjpY+THgZHD4fB/94ULvS6buNls7NgNyQDaSR4Eto+HjMfj5sCJRMI6DcViMWUyGTNEpovjLLy5EsRcVgvAgNlzfwbncBMJgHA0GtmmuW1fYcFYfIyBzjtsgtutZj6f28T3UChkk+ORGMC+tFotY7lYD9aUo33a1UajUWMCgsGgafAJ7LAGrBl6djTeGBh1AMzT4DspHu33+zb9nc4wdBuC0SwWi6brhe1brVZW5EmAzOfzmkwmJisoFArmtNgGTFAoFLLvwTEkGeuHc6xWx0nlg8HAgBhJCoMZXRYStjabzZq9bTYblctljUYj3d/fm36aWge38xGsGBpzSXYMDwMci8X03XffGVjf3d2ZLTCpPZPJGKgnEgk7kqaNL1rt5XKpN2/eaDweq16vq9vtajKZ2IT6QCCgh4cHFQoFNZtNJRIJTadT9Xo9u9/ZbGZsaDqdNukDrW4pzK3X63rz5o0xm/yhExHJIK2NSWpYRzoiMTMH0MNHYGFTqZQymYxJi0jCYDaj0ajevXuni4sLffvtt0qlUnr27NlJjQbM7Hq9tkQUzT86f5hMmN1isajhcKjvv/9elUrFurKtViuTfrhgDtuPP0ajUVtzWNV4PG6nAXQvwz632+MASfYV0OBnaSBAVzFmk1CU3mg09Mtf/lJffvml7u7ujHUlIUJHLsmAiv/P3mezWWvXCyNHPCYOcmJA3ACk8DmSVSRDwWBQ/X7ff+H6gMvHSB8jfYz0MdLHyI8DI6fTqX7961//6AvXT+5S6LI0sVjM9Khoi1l4NNJoaTnWdAMKQR1GZzgcql6vG+jwXbvd7qSYkM5LgUDAjpkJuiwGzrDf7621KG/Truad4342jGNXnIHPYeI4xZMAHIwEzjKbzSxAsxmwNv1+X+v12gr/OGIl+PDcvJkfDo+tg3e7na33fr83jTGgF4s9DuPD+VhrjC6VSpkkBLYDAx0MBqbvBZTQS8NolEolhUIhm7/BcfVms1Emk9F4PLbE4OHhQd1uV9lsVp9++qkSiYRev35t3+sekXueZ+tOIS3Bgha7BOLFYqHxeGwdoNbrtQqFgjExzHxARuDq3Ukm2He67MA+wszAKt/c3OjZs2darVZqt9t2XN/pdEyvT6CnEDMcDhs4UwtB0Ad4sVXWIRwO22fP53Pd3t5ancTTp0/1L//yL2q1Wrq6urLCX9hXiorpFhYKhXR/f281GwRvAJvOVDc3N8ZWsx6AtMtA3d3dmZQAm7y6ulIgEFCz2TTbhJ07HA7qdDoKh8MajUYWxAAohi92u13F43Fj5drttl68eGGAhz0AxtJROsBQ0Ldv3+rs7Mx8d7VamQ2SRCG7uLu704sXL2yAIjUEJHXMDqG+gz3L5XK6u7szAEKeVCwWNZvNDMyGw6HNBVmtjm2tSbIpXKd2gKJq2D5suVKpaDQaqdfrKZfLWXtdN9CTnLsAKj12O1sulxaXXaYXMHF168RUQMkFbFhx4op/fdjlY6SPkT5G+hjpY+THgZGc5P7Y9ZNfuHBQijApcHVBBYbODeRu0Sk36R7R8raIJICBcWiiYeZg13Ac2Lper2daeUCIoj4kE7A4gA2B1G1/S/ck3sxhkFh8tJ8EXBgKz/PsSL5QKKhQKJhulqNhhqwx04BNDAaD1nkJvTRGsd1u/1vhNAA7mUxspgr3DkhyzMs6BoNBtVotM0hkHvx/z/N0cXFx0gkG3TfrSoteAnO329V6vbYhi2iOYR0Xi4W++eYb/d3f/Z0kqdvtajQaWXccdLnSqSZ8Npup3W5rt9vp/PxcodCx4w/MyPX1tRUWD4dDY6/ocIVzsQ6sD8fewWDQ2pju93tVKhXrOkWBZyKRMKAkeLtH0dgmTE8wGNT9/b2xab/4xS9MzkOBdSaTsT0iiUHj//r1a5PXAOTY2V//9V+r1+up2+3q4uLCpEB0JoIlfPfunU20j8fjevfundbrta6urkx+wBrd39+rWq2q1+tJkkqlkklF2IftdmvDMV0Wnn/bbDY6OzszWQOsaSqVUi6XM605bPFut1O9XreaCD6PjkPSEVy73a7tHRdyqGQyqfl8rlarpf1+r6urKysCpj6DRGK1Wunq6spAB3kBgzmRZyyXS93d3enJkyfGIEuyltZIarAVpE8kh8QXhjsi4XDlDbvdzvYL9hswSSaTymQyWq1WqlarOjs7036/N98l2UQihg8TE0jIWT/XzhnACfDANOLfzGEiEaLBgSQ73fCvD7t8jPQx0sdIHyN9jPw4MJIXsh+7fvILFwYgPb5Nonl0H5y/R25QLpft2BVpBTILgIGe+xzzSbJCNbc4EcYHxhC2jTdTz/OMFYnFYsYIciSK9GI4HJ44NcBVKBQUDodVLpft7Xk8HqtcLqvf7xvLRzEqR5NozqvVqvL5vBXicl8wNMvl0sD18vJS7Xbb2Ad0+TAXsHbhcNicMhQKqd1uq9frqVKpWHBAesLMBXS87MF6vdZwODQAp/sT7BwBYLFYaDKZqFQqmZY6EolYoWQ8Hrd5DJJOjsxvb29tfkUmk1G
z2VS/31cymVSv1zMNLIFZOgLJy5cvJR1ZukKhYKwsGlr2hCALA4OdcbTNTJXpdKpisWjPmEgk9Itf/ELNZlPffPONzs/Pjbk7HA7KZrM2rR0GlWBIsgDYz2Yz69g0m820Xq+1XC51eXlpPx8IBCxpePv2re0bbAprGovFLMAwIBLAXC6XJ52V+FxkCa7OPJ1OKxgM6vb21oIkTC1rDIBgU65tDYdDCyYERQCWNYQ5putVIpEwSQx+HAwG1el0NJ1OrRC1Wq2eFMw2m01LOLHzSOTYFrlSqajdbuv29lZ/9Vd/pc1mo/l8rlwup06nYzUwz54902KxUK/Xs+93ExIYZtYE6Qygg5yBGAETjB+hiz87O1O321Wz2dR6vVa9XreEmmLnZDKpXC5nDDKfBVsHiOGD1Hcg3drtdjaThQSSpIXYStLuAgfJCiwhLC0xy/0+1oKkGUBxWXxOXfg7ZGj+9WGXj5E+RvoY6WOkj5EfB0a+74XrZ2kLz/wKHBl2yC2cBEh4q4f9ouuPWwDKAwIEu93jDBM6iVCYy3+z8OFw2BZhtVoZGwIoocfleJp7xDnQ8UqytpWweQS8QODYNpL2rDCAsDDcK+0ss9msKpWKJJnem+/M5/NqNBparVa6uLiQJGttyWcGg0HTv8MMuEwlGnnYU6Ql4XDY5rCwN67OFokABciwCGj3YfNgALmXdrttxaSDwUCJRMKSBtgrtNEYHyDCZ9DGt1arWcHjdrs9aW0rHVmkdrttQYvi8+12awMLYY45MqboNhKJmKab9ZtMJioUCqYd3myOM0uwi+l0qlqtdlL3EIkcO9u4cgQGBabTaWuLCsCuVsehl7RpBYhgwsbjsc20oD2xJJudQgcfGKZ3796pUqmoVqtZa1YYKvyMOT2AArIHkjXYOiQMBGX8jUQIqQRJjOc9FlNjv64kgZ/ZbrfmYyQN0WhU6XRau93OkpBkMmm/EwqF1O12LbmCBVwsForH41qtVlbUTXAOhUKWaGKvs9lMuVxOq9XKfAtmi1oUGKh8Pm9SApIDPo8C+v3+OK+FomvX5+7u7iwgwxBjC9g18dDVeCPT4Ts4KWIdALRCoWCtl0ejkbG7rmwKmQbJE3sI4OBjMNeAnZvIIgfiHtxiYJJxPhefxq5Go5Ffw/UBl4+RPkb6GOljpI+RHwdGjsfj/7Fpxk8efMwiE/wojCRw43wYGhO7J5OJHTMTsDgODQaD9mAwOzAEaGVhPXB4zzt2d0JTDCOIocCwhcNh06QzNHA2m9kbNW/VGDWOSeHmcrk0EOQoNBAI2KyGm5sbY6dKpZIymYwFBhgLju5hDHB4z/PUbreNVYBhjEQiVsAIWKClpnsLQEbgSKfTNgiOAOwaUDqdNolQPB43UEZ6AlMpyYpCkSm4jvH06VP7DjpGeZ6nfr+vxWJh2vx0Oq3xeKxQKKTBYKDBYKD5fK5Go6FwOKxOp6OvvvpKv/3tb/Wf//mfxrawt61Wy+QVAM5gMLA9Go/H6vV61lJUOs4KcWsg3D2GITo7O9Pz58+13W51cXGhfD6vfr9vbBeBknkaDOvkvgDyTqdjQzBns5nS6bTu7u6seNo9Jnd9BnsmWLt+0Ww2Lch+8803ikQiury8VCqV0uvXr61jFlIRSSf1H9QnoOlfLpf63e9+ZwGcBIJkYb8/1jlgS/g39RTRaNTaAk+nU2PyYJg2m43VZ8CwI7HIZDIql8tma7PZzFq8kvy4Re7MfWm325pMJiqXy4pGo8bS0llqt9tZ3chqtVKr1TJbJmCzFkiIYDuZVE/QzGaz5tMkJeVyWZeXlwZugEsoFFKtVtN6vVYul1M+n7fiW7TwfDfSmslkYrGGf+dZ6vW6DQulMJe5I4AZTDInF4ApSboLIK7mnriIfQAeMIAu6LhsNacYyLcAHP/6sMvHSB8jfYz0MdLHyI8DI//Xa7h4MB4AFonAxBsgb+W8ZbpSC9gm92iOByUAI4Hg39wCWRyVzeQ70Q3D8HmeZ8fdBCxYEt5qaePJfaAj3e12NqU8FAopl8up1WqdDHFkUxlUyLE4QYFuPQQSWAVa4w6HQ9OD02YXEIH1oIgQPTjfORgMNB6PVa1WTf++2+1MMxwOh40ZgGGESZVkgYG9QZuPjhr20J2Mjv6VgYYYJEkARZ/RaFTX19eKxWJ2XI2GdzQamVaeDjYUQk4mE/V6PZ2dnelwOOjrr79WKBTSZ599puvra7MJumYNBgM9efLEBvk9efJElUrF9hSAIDBwv4lEwlgmJDKAFBIAgmYoFLJCVWyaeRG1Wk2dTkfZbNb2IpfL2WwWlxHD9gh+sVjMClbT6bQFsOfPn9sRPbrl4XCoTz/99CSIELDwPexfksbjsS4uLixo4A/UDgAodCxyEzWAhPtmP10myvM8G9C4Wq10eXlpQRy/6Ha75gvEA4DAHcgoyYpr0+m0zThhHog7Z6dYLJqtD4dDtdtt26v/Oo0xuQD+O5/P9fLlS2WzWaVSKTUaDZNdzGYzSyBc+YbnHWtkrq+vrQUuvo1Pu+2tKSpnjyOR45wV9gpmH0kU7DCdxZAyeZ5njCfNBTjFgOGGwSXu0BXK9W+YdJcZJ267MQB/4MLeia/EXv/6wy8fI32M9DHSx0gfIz8OjOTU7seun/zCxYf/UI/uGiPMGSDAcSTHchx5wx7xJxAIWPtPjuJxhO12a2/RPCiLgeHwGbAhuVzOFs7zjl1v0LtT3Mtib7fHYYHFYlGHw8G6qRSLRTWbTRWLRS0WC5sBIulktgBFdzCazJTgqBOwggGJRCLWtpQ1pFXper024OTNHlBkXQCnxWKhdrttgx1Zh1qtdjJ93gVm3tIpPKZTzeFwsOfEaGF5kCS4/991EtgQ2AUKOmFACSBPnz5Vo9Gwgt/hcGgOt9vtdHt7q/l8rs8++0zL5VLT6VRnZ2c25A5pTLPZ1OXlpRXPDodDu3eKG3HGcDisarVqz0jtwnQ6ta5Sq9XKAj7MEgENJ4Udpr5hsVjo/PzcmFHkFIFAQOPx2AKo2x1nOp1aQkD703w+r9evX+uTTz4xOUs2m1W73bZ9+OUvf3nSZYt9owaCgIGsgGGSMFbYEQwPyQ5SF1gn/A0GCqaIC/sMh8PGxi2XS52dnRmAslbj8dhsnWN9kj3WWjoG3/l8bl2ZIpGIhsOhIpGISqWSdaZCXpDNZo2JAoSRrbD3+/1e3W7XJByr1Uq3t7cql8vGYC8WC0sqYDK32635MzOJNpuN2TYtcVkLGDyCsisFoaaAJJrYM5lMTtaU74e9Jx6w9/gbwEPcdcGYmITEwi2OB7zcmMn3cvFzJPfILZCZ+dcffvkY6WOkj5E+RvoY+XFgJD/7Y9fP0jTDvQnYOm5A0gnDxjE7b9ewDjwsxkcXGY7+OFLF0DFwjvcAERYABsINeDg9b+Toa3FgF4jQ+sICUhzJ50+nUyWTSRWLRfV6PWN2eF4MaDKZaLFYKJ1Oq1AoaLlcnoAfnZHQmsIqJZNJWxOe02UWfhjEYWOSyaQGg8HJEbAbaF3tLwEFQOHzpaNDwyKyP26QJxAC6tvt1t
oAoxdHrxsIBAysK5WKPWM+n1en01EgENDl5aUlFTA6BN1PP/1U0WhUNzc3isfjVrz79u1bed6x+9fV1ZUuLi4UCBznbsznc5XLZc3nc3U6HV1cXKhSqRgzgQQACYV0BA26MAHMrJObyNDKl8QB+9tut2o2myqVSprP5ydMLQGNImIc02W6CQRnZ2d238Vi0SQfDw8PVkj/5MkTAxIuZsaEQiGbYYNcYzQaabVamWSIAHZxcWEgPZlMTIqErAH7cH0L0MP3CDTIF0h2YEUlmYYcFmy3250w3uw1a0IiSYxBbkOheSj0OEiUdr9IiiSZbMbzPGUyGd3d3WkwGCgej6tWq6lQKGgwGOji4kLVatX2nuSHKxKJ2LqRWDIYMRg81nu0223lcjkDgIeHhxPGGI2+2xEOeQm6fthqkkZqDrCNUChkcYBk2gUs5BfEOOwgFAqdJN0k8MQ9bN99CXBjNVKLw+Fgpyj+9WGXj5E+RvoY6WOkj5EfB0a+74XrJ9dw4XgEOW7MdRTpcb4ChsbP8IAEJJgit6e+y3CgGyWA8geGCLDYbDY2l4Hgh/4d5oCAcDgcTNONVjgQCFi70sFgYOzYdrtVsVi0trfu5vHWzZ/pdKput6ter2eaT4weY9rv99YhyC1GzGazxioSMDGi1WplcgTkHDwXMxpoq3s4HIt9aduKk3qeZ87AsffhcFC/37euRpPJxKaquyCIE/f7ffsOHBc2ZbFYqN/vK5PJKJfLKZ1O25R1prpzPF2r1VSv1xUMBvWLX/xCxWJRT5480dOnT3V9fa3r62tzOuQVT58+tdkLz549069+9SsLVOl0WrVazdgSnA12ikAAa8e6ISHheBy2DSCllgHbIwDBMMOmAQrL5XEqPO1cSY7wDYJVMpm0+0WGQV1Cp9OxIYisF4kYdj8YDLTdbq09a7fblfTIZBGA8UXkPd1uV4PBwJ6PhAZGjUBG8TCFr0h4SLQAHTof1Wq1k8Sg1Wpps9kYE4etSDI23WXvFouFsZyuJhpQcxs4uFKG0Wh0MuuH9aAF8pMnT07kBCQMxC7ihvQoG+D78V0A2gVXZDPEkXA4bN2q0PnD/BLTYGvT6bTVPeCXbnLuSheQouCPsKIku/x/Vy4GWPzwv2E1sUtiATGb+ATjzuXXcH345WOkj5E+RvoY6WPkx4GR7MWPXT9LDRdOCLvhvmFiOO5RG4GdmyQoI2tgUTgO5CgR+QBSA4JYOp1WJpM56eZE8ME42EC06DgaDo4z0VGFRWUOAwwCGnf3eBO9sud5Bo6SrFgX1pFja+4Hg85kMieaeYyQ9QFMJBnDzHNLj4YBGwdTMJ1OTc9OsSwF0ejQYSsWi4V1XgKwkJoQzFhzSVYYDRtIMMU4YRvC4bANtORIv16vaz6fmwyAtYjFYtYhZ7/f27R5WMVqtSpJ6vf7Go1Guri4sK5Fb9++Va/XMyCuVqsGzgRSnh9bGwwGxsLCcrG/m83GuiZJMhaOZ2MtdrudtX5lKvxwOLSWrOwpwQc2hyCDfR4OB41GI9Nbf/XVV0okEiqVShZQ8vm8SXW4z8lkYoMdkUIg++B+a7XaSQJF8J/P5ybBwF+QGriyIZIaEhrpsX4Ev/c8z0AaeZTLnqZSKUuCsCuYYP7/fr+3+0bTDXsKkw6zBji5yVUmk1Emk9FisdBgMFChULC5M7CRDHqUZKA0n8+tzTDABJjDisF6kTgGAgENh0Nls1klk0lJj6w1nbL2+721J2ZtGfhIA4Fut2ssPMCIXAdmkmSFpB1fwiddlo1nWK1WJyckbjKOPRNb2L9QKGQsKs8Jm0vS7L9wffjlY6SPkT5G+hjpY+THgZHve+H6WboUwuAQZHkrdP/ODb4uqIRCIetQxCLxZj4cDu0Ij4CNkbGBvGXT3QRgw3FhBnjjha1zu6RQhErACAQCSiQSymQydl+TyUTD4VDL5dKChzv8kaDF/eEoBBg2B0nB4XCUgXD8zHrB6LlOC5ixtjis5x3nUMBQATpoonE+wAWWIRAIWFtT1oz7x4gPh4PpcXFY5pH8UDqBMXvecaZJpVLRZ599prOzM9t/l0WCvZ3P59a5JhQ6drSJx+Oq1+vK5/PmGHRZIlFBosLfA4LhcNiKRgEKl9lApgEIog+ez+fWkcnzPBuwCbvC50jH4DmbzYxtQu/uau6n06mGw6FpkWE7sQX2j4AQCoWMCSVoIeXodDqKRI4zQujQwwwaHB1NPlp8ZtcMBgOtVis1Gg11Oh0LumisYf/o5AXjSjJFMKdwlwQQmQx+sN/vNR6PzSYDgePcE4rHYTnn87nZLM/E/4fdd5lN/JbfpR3saDSyImHsUXqUwIzHYyumz+fzqtVqxgQSjEmqJpOJxuOxlsvj/BbaFyOVQXqCtj6VSlniSYIA0ynJ1oF9JDZiP9TJwIzv98dC5Hw+r9FoZHNOaEDAeuJvgClr5NZGSI+nJG7yDhuHP3GRCJBsATCxWOxkFhGxBImZf33Y5WOkj5E+RvoY6WPkx4GR7onbD6+f/MJFEJQej/bcxXN/jsBIgKEQ1S1o49/c4kY07bz1M2gOxyRIwDawQO6m49AEThZnuVxqtzt2KMFZCJIYSCAQ0Hw+V6/XM0NFY+uyG6wFwQYGcDKZ6OXLlzbjgcLc6XRqGzkYDKzgkWPbSOSxexPAi/HQsrfT6WixWCibzSoSidj3EqzRTcOOsUdokOk0xHE44MsQPpgEF7C5H6Z9szeSbF8BSY6TWevJZGL34na+cTsURaNRk7LAihF0Q6FjxxuCVDabVaFQUKFQMPCnm4/LvLAXMBmwZnTrYX33+2M3J7edLnIHQMhNSGA2SARWq+NEeEBgOp1aouJKHprNpnq9niUZJAOLxbGd8vn5uSqVig6Hg9rttskTSqWSttutut2uAoGATZynQBWbpUuS53n69ttvT5JlWCTstN1uazgcqtvtmmYasCYJc4/psQcCMOuQSCRsiORsNtObN2+sboFEMZFIqFgsmnQgk8mYZAC5CYkbbDzH9sHgsSal2+2q0Wjo7du3lgQhr8D/DoeDddKq1WoWn9g3kuD1eq1UKmVATzBer9cajUaW1OGHJED5fF6FQuGksBl9PKw5fkZ8wo6Wy6UxrsgodrvdSQLEUE6XNQbgkfngc6wNJwjcL/YeiUTMh1gHkm0AE5tGIkLcI7EEkN6nT/evH798jPQx0sdIHyN9jPw4MPJ910+WFOIE3IhbaMtbHxdH1vwdi8kNo2d1j+9YSLdgGLYNwAiFQhoOh/aWi96UxXC14wRNDA92izdT3nKlx45D7vEl2lYYGAIETNgP5R4wFqPRyAY6ugV5MJswKhQH9N105gAAIABJREFUM4gOIHODK4ELBojAEQgEdH9/r1gspmq1aoWL1BAAHrAUAIEbPDHOcrmszWaj6XRqjo9kAxClwJHjXBe0kR70+31jgUKhkLW2BcSQDywWC7sPOuoANDC0DNlEU813si/syWw2U7lc1mw2M8djT7BTHIQ9R4cOwwmLCjvJ8yID2e/3J0Afi8Vs78PhYyvZ29tbZbNZ9ft9u0905
NjHdDpVuVxWLpfTbDYzm9psHlu6kryg618sFkqlUhqNRjaYkIL0SOQ4vX6xWKhSqRioIc+gDgNpistuEkhJGpjrQVBivoxbEwKDnUgklEqlNB6PTW7w8PCgh4cHvXjxQul0Wr1ez+oEXHZouz12e2JN+PvFYmGSBVi/29tbdTod02ojfSC5cxlJgAAGELulE5oroSDQ49u5XM4kGUgw6A4Ga0rswY4J5iSGhULBmEWSOGIktSXpdFrpdFrdbtfkQNgAn4vv8V3EC8/zNBwOJR2ZZvelaLlcWqwhaQc83RcAYswP/Yh9cGON/8L1/3b5GOljpI+RPkb6GPlxYOT7rveecAUCgb8LBAL5937CfwUfZAa8bbJx7pEfTBfMHkGfv0MLzFurKxtgIfhZAgGMBcfc6K4JLMFg0DrLIFtgg2HBkAHA/nFciCYULTjBHN0nhkrAwVAIVhRARqNRpVIpPTw82DogvQiFQjYZHXYLVkaSHeVOJhN7y+bfcABJarVaur+/12g00mw20+3trd0/P9toNBQIBEyPTVAE5HjWarWqbDZrwZG3fQKbuw+u3ng2m6nX69nkegIuTOxisTD2iePv+/t7Y4UIWAxG5Lso8OTIWJK1r0WSQHKBsyHdWK1WtgYwiNQIYFu3t7fGujDV3vM8TadT9ft9Y5az2ayur6+Vy+UM5GAmAWbWiWN99+ganXW5XFY6nbauX7Cvl5eXyufzFiiur6/15MkTnZ2d6erqylin7fbYoWw4HBrgEsg4ci8Wi4pEjoNUKZz2PM+AjOevVCrGHtJZCQmGy0BRL4CUZDab2QBS1mi1Wun6+lqSjGF99+6dJSTr9XFuRq/XU6PR0GAwsAQQv41Gj8Ma8/m8gSCsWigU0u3trQU/mDBiDvsKExeNRs1Hut2uzf1hr2GpSICoF2F9iC3EEZ4bG2s2mxqPx/K84xDT3W6ny8tL8zcADHskqBPMg8GgCoWC7WexWDSboMgbeQc2BFMaDodNrkJzgslkYkBOYkeiACvnsuouC80zwkADSsQEEgf/Or18jPQx0sdIHyN9jPQx8g/ByPeecB0Oh//vvb+tRwaOQI1x8Pcui8dbZTKZNCYNp+ft0T3yJ3hjGLBCvEViUAQcmDpJ5rjuW7wLMDBt7rFpKHTsDsRnwzb+11ookUjY4uJItKXEEfk8noHNKRaLenh4UDabtUBbrVZPjt3RINNxqlwua7lcWsDIZDIaDAa2qRyJEgxYW4zLlSHwp9fr2RwNWInVaqVSqWSMFdr//f44aR3NMBps1m63Ow49JLAScJE8BALH2RowXhQIwyBkMhn1+321Wi0bbpdKpczhYLBwmkKhoGazqd1up/v7e3mep2fPnqlcLhtrhPNyTMze4kx8N4zjdrvV+fm5scPcN44MG8T9MEMF2yeB8DzP5ogkk0ljnCVZq1eeYbvd6u7uzpgVgnw+n1c6nVYul9P9/b263a7ZF22MWadkMql+v6/pdKpYLKZEIqFms2m+4RYaI1+heDgSiRj4c4zuSpIOh4NpwPl+WMNIJGIFzcgaMpmMCoWC2u22isWiBc5AIKByuaxms6knT54ok8mYFGa/3+urr75SKpXSixcvlMvlTIcuHQM57CKymPl8bqDEz7jsMQykGzNWq5XOzs70N3/zN7q5udHd3Z3NEzkcDpZoplIpdbtd+z0SGQqy8X/2ACCnuxMJI7bi1kRQg0PSuVwujY3DLohbxB5sCzaW54LZ45SEuMeJBAkUnwlT6YIDsZXkhNjBvmE3w+HQElISS5oQ+Nfx8jHSx0gfI32M9DHSx0gwklj6Y9fP0qWQ4lICnLtIMHCwdPwOBsGNsnme56lSqcjzPOuu4z6Iq8ENBB7nf8AgRqNRcw6OwUulkrUiBXDQJLtH8rALvMUzhyGRSJjjrddrJZNJ63YTDodPikAJ9IvFQtvt1gbItdttXV9fy/O8kyLY6XSqJ0+eGBDt93tjYq6urqwQk6N85m9wvxjq4XCwjkGuLAXtLDIPgJtuOhSfcgTLoMjD4VhMmc1m1Wg0lEwmbc5COBxWq9UyQOBoudfr2T4dDgf1ej0bJsgx+fPnz5VMJrVaHec2VCoV01q7c2Emk4kVcAJcgFEgENAvf/lLs69QKKRKpaL1eq2HhwflcjkDymQyqeFwaNpcV5M7mUysNqFUKqlararZbKpcLhvzTOtg5kUAEoBMOp02NpLjb5IoPn86ner58+f6/vvvbfYEdgX446S9Xk/T6VSFQsGKlkejkbLZrEkTyuWysdXr9dr02Bzvz2YzvX79Ws+fPzcmx9XW53I5kxcxt+T+/t5sEJ0/7DLBFJ8jAD99+tSYR9i1dDqth4cHO/r//PPPreUz+wvjXavV1O12bVp8o9FQOp02dgmpBFIPWFrYNfwFGyHZJPAj43j79q3q9brVLSALgcHCzhKJhNbrtc2ZwRdgltHQj0YjzedzlUolY29JRpBcIFfY7XYGwvv9sXCaGMJ3LZfHIZjY4ng81mZz7ACGhAkZlXQ6C8eVbQE4JBCAIqxyKpWypJ6kipMO1sFdU0n2MuAmJ/71YZePkT5G+hjpY6SPkT5G/uQXLjYSbawke0sFUHgDd99AXQ0mIBKJREwnjDYTtoIrEokYaycdJQ8ciRNoMKLNZmOBjMUgSOTzeXtjRZMaCoWM6Umn08buIGFA70nBHyxGNps1lg+5BZ9PEAYsJalSqejNmzfa7Xb2/KlUSvf39+r3+xZsU6mUMZ3okUOhkGnZV6tjO1Dmk2AgOG2v11OlUjEjgG2jG9JgMFC/31ehUDCNda/XM+YB8BiNRqZ1DgQCyuVyqtVqxtzhxLBKs9lM0+nUjp7ptPPll19ah6hcLmcGnMvl9PDwoGfPnhk7CyuWzWaNmeHInkCBLRDsAZVSqXTCvjJIkWN52A8A8ObmRn/7t39rbAXOAzDzbEhMCD7YoKv/pWUse7DfHwuMJ5OJ8vm82XK5XDZ7S6fTZnOr1Urff/+9Op2OXrx4YXUCJFRID0ajkR2jw7y62m5XVuN5xxao5XJZpVJJvV5P7XZbT58+1WazUaPRULFYtDWJx+OW0BCYpUctcyqVUi6XU6FQUKvVMhAGZNfrtdlyJpNRPp/Xw8ODZrOZMpmM7QfBOpfL6eXLl9YxCmkJwZC5MZPJxBhGghyAn0wmzXb5/Ewmo+VyqXfv3qnT6ej6+tokS8hNqAtg6CTJVygUsjoFNORII2hn+8knnxijO5lMlM1mtd8f2/ZOJhNJsq5X2PRsNrMkCJ8m/vEdi8XCYirBHrAOBoPG/mFLxB/kESS/SHUAa3f2EAw7unbYbsDYPT1xYz2A5l9/+OVjpI+RPkb6GOlj5MeBke+7fvIL1+FwMAkCR62e55mT2xf9lzPwBs7C8Ps4BsfOsHB0/MlkMmZogJRbREhQ5u1/vV4bQ4d2lU2DUeGNPRKJ2M+iS14ul8rlcrq5uTHjJsC4Cw+TwPEpC85bNPcXDB67St3d3SmZTFpRJ2/l8/lcDw8PFhgArUKhYIYJMPGcHJMSiEajka1hvV5Xp9OxLjKu
Vj0cDms4HKrdbms6ndrAxX6/bxpunAIQjkajJ606YXe4JxydoETHoVQqZTMm/vzP/9xAOxgMarFYKJPJmGym2+3a7AWO1inEZlAkjA4Bk+JW9MBIV2B8cBiYMgKEJJtBcXl5Kc/zDOCCwaCGw6F124nH4/addM7Z7XYajUb/jTXu9/v27OPxWPV6XYVCwRKJ/X5vwy3ZLxKt4XCo8XisbDarTqejf/u3f9Pf//3f61e/+pVevnx5krhNp1PV63VJsuP/aPTYOrXT6ahYLFrLZ1i5UCikt2/fKpFIGDNYKBTs+/P5vNbrtR2vA+L4RjQatdkwh8NBjUZD7XZb4XDYvi8cDqtSqej3v/+9FebiqwRrupiVSiXV63V7LrTy2Pd6vTb/LJVK+td//Vdls1ldXV3p66+/NuCHvUaGgfQNuUo2m9VgMDAGHnulCL1YLCqVSimbzRpzDMAgmXElQS4LttvtVK1W9fDwYMnFD2U5gB1+hZxCeuxyNB6Prc0zAEZyQBIEk0o8pZMb7DcAE4vFTGbBYNt+v3/SRpqTB9hJYtQPE39inX/9v18+RvoY6WOkj5E+RvoY+bO8cAEIkmxjYRw42nfZHd4KOdZjccLhsB3rRqOPE+U5LifYs6kwd7AGPLjboSeVSqnRaCiTyVig4Lt52wZUYOdgDikahJFwwZA/+/1eNzc3xnrAMnmeZwP6ttutFZGu12vV63XrWCPJjquz2axisZh1Z6HTFEDLmzZGmUql1Gq1FAgE1Gq1rOVmMBjUs2fPlMvlFA6HT0AdKQgBn+8kGBYKBUnSYDDQZDLRer3WkydPJMkYFIoR6X4DawqLFYvF1Ol0bE8oCOaZsIlCoWA6/nQ6bRpsSaYNHo1G9u9Mfd9sNjZXZTAYGDARsLEJOh/BmtCRCdtByx4IHFvJSjIZy36/15MnTzQcDk33TSE1rCv2yR7BWFYqFU0mEzuev76+1qtXr9TtdnV2dqZ2u63Ly0tVq1UDfJIc5DvlclnZbFa//e1v9fz5c9VqNZN50I2Jn93vj9r7fr9vx+9IiABOwC+bzRoD2mw2dXl5qVgsdvLcMDcwkW6RuyQD/0KhoKurK5MjEJxev35tLBuA/PLlS33yySfGJJ2fn1sB/3a7tY5cLnvL719eXloBfbvdtmJ9AjJae/TqJBgkK+v1Wvl83oplKU6XdBJg2+22arWaJb4kJN1u1+oVrq6uNBwObX9JVorFokleSqWSdWFyGd/9fm8ymGQyaQnEfn/sNkULXU4viK+e55nt7nY7O/lIp9M2h8llWgFnfC0cDuv8/NxkH67sgpkqxHAKwYljgJcb3/3rwy4fI32M9DHSx0gfIz8OjHyfCiT4xRdf/OHI8SPXP//zP3/BhvHmiTFyAzAtMFuwfOjYKUzj+Dwej58UxUYiEet4FAqFLNgS+NlEWJFgMKh8Pq9Y7Djsjina4XDYutuwAYCYe/zIkTrggRNxpFkoFEwPi2YZzSfAgzO4syXOzs4UDoeVzWYt8BDEJJksIxA4dmDJZDKq1+sn2mzuBTkBz3ZxcWFrh/4+HA5b4KAQ0GUdkJHQbYm94LhWOupTG42GgcFgMLBWqCQJzNGA4SEoLRYL21fPO85nSKVSevbsmRVFT6dT09yTfLjyE0AD1iiRSBjLst/v1Wq17HNub29VLpe1WCw0nU6t2BWmhv0GSCneff78ufb7vb755hsLOAAQ3akKhYIVfMJwbjYbO+JnfZPJpOr1+gmTg+RhMpmoWCyq0+nod7/7nem6CR7olgGMYrFoM0dggLbb43wRANTzPEuCWq2WYrGY6cvxQUBnPB7bXBhm0kwmE2Orut2udSqC8T4cHgvupUe5EmtHMvDq1Sv772+++caeDYYb2+L7iRmHw0Fv377VaDSyQBYKHQcfttttSdLFxYXu7+8Vj8d1dnZmnZTcmEGiiH9hi61WS8FgUGdnZ+p2u1bwTytgkgB8djAY6PLy0upPOJlIJpOqVqtWsE7tiXQMtAzDBIyojUG73u/3NR6PjbkjTrC/SDhWq5UFeII890ECBNCQPFEfhA8jj+H3aCMeDAZNrgGzKMl8mUR7v9/b2iyXS/MjgKnb7T588cUX//cnAcdHdPkY6WOkj5E+RvoY+XFg5Gw2069//ev/82NY8LOccKEVdt/waUMJ08YxOYwXb+0YLEd0nueZw6LTlmSOBDPosnCbzcZ03NFoVKPRyIr1Op2O6XoDgYC1xYTVCofDxlLQKcmVFvA8u93OnB6W5XA4FuFOp1NzbED1cDhYYILZQtogSZ9++qkVp15eXurm5sbe0NvttmmLJZnjAQLISSKRiHK5nDl+vV7Xw8ODSUtgzzD8/f44yG04HNoaEpiRcRQKBZslQWcZCn63260uLy9tLzBSjnLdPeZ3Q6HH+Sw4wGw2s7aaME/SkflttVr/TbpAYEHTTF3B4XAwNiSTyahWq2k6narVaikcDqter6tUKun8/NxYjk6no3A4rFqtJs87tohFv/zw8KA//dM/NakPDDAyjOvra00mE3NUbIej9kwmo263a0Wf7IHneQbQjUZDn332mer1ugUI6RikAVGKqJFbVKtVk6gAIvl83vTLsM2lUslkPSRwJBjRaNQYs/l8bp2k8B+O59frtabTqQ0X3W631nYWP4DdIvCsVit1Oh2Nx2N9/vnnxkySMFA8i+Tkyy+/lOd5+vTTT1UsFi0WdLtdYyipiWD9SPiWy6VevnypSqVizBmsFBIpgiAxxPM8dTodYzhTqZQlFdgY9nc4HKyAnCYCBGvWbDgcajqdKp/PW7tmEs1YLKZ2u20sOUw+Her4uUajYYkc0hw3BiIlo9U1f++CDLKI9Xptwd7zPEWjUSuEj8fjVnyMvbmyKewYGyWGYZcAIwmEf3345WOkj5E+RvoY6WOkj5Hvr/D6Ay/eGCmkw5ilx2M2jqVdAySgcfRJEAI0OK7c7XZ2vMl/u7p2jo4xKOlxnkQkchwgdzgcrC2qeww4Ho9NZ4sMYbFYaDKZaDqdKpfLWcels7MzY+VwAPeIGd0wRahuIXIoFLIZFgRx1gd5B1rtzebYCYWgBFuwWq3U6/U0Ho+t0BCtLF193Ddx3uIxFAKcW2AK49TtdnV/f6/t9tiOlbf2brdrR6zT6dTmbtzc3FjLVUBBkjkqDBKdYgKBgLGXDw8PpoVmfThWDgaDGo1Gdt+5XO6kxWqv15P0ONcGBiUWi1l7UjpJBYNBjcdjO44vFAoGfAQAAkWxWNTFxYXW67W+/fZbayNbKpX02Wef6fz8XNPpVL1ezwIEumyCEC1vYbv2+73prGGLJOnrr7/WaDQypjISiajX6xmQcrTvsoxomPGL2Wx2Up9A0kNh6Gw2M200uvV4PG76+sFgYGzoZrPRd999Z4XervSFORkEROyWJJFBh8+ePVMgENBvfvMb7fd7a9cbCAR0cXFhUqjpdKpEImFT6PG3i4sLff7558rn8zanYzweKxwO2wwYgpzneQYso9FIqVTKgBtwJEbs93s9PDzo7du32u12xl49f/5ckqyQPBaLmf3SGpq
1BjzcOhPYOWxgt9vZvBvAPRAIKJ1Oa7fbaTgcmp58Op3a/gWDQWsrTMxwWVyeYzKZWEEyzBpyJphV7IR4RvLMMyOfQYKBX3LPMMkAsCTTqbux178+/PIx0sdIHyN9jPQx8o8fI7GzH7t+8gmXJHMuWDmCtasH560RoOHNluNXmAn39wAiOo4QvN2jfx7Y7atfKpXUarVsM2AccF408HwGm5TNZm3AIoWj5XLZukBRxEtwxdAY9MYRLYWUgAMMGxvZ7/dVrVaN2YpEItZRJxKJWEek9frY7hSDRCIxm81svkOlUjGmdL1e6/nz55pMJup0OiqXy3p4eNBqtbK2vBgOQZzf4/iWY9tUKmVv+xxfw4pMp1M7Lue+CJTb7Va9Xk/NZtMAjRaynU7HgjnFijc3N9Ymdr/fGyMC28qROvu52WxMH4/jFQoFpdNp3d/fK5lMGtsIe/Xq1StNp1Nj9NzkBeZitzsWdm42G2UyGdMEU4QM+FQqlZO2xuv12vYkHH5s00ois9vt1Gq1dHZ2Zoxuu902phafyWazarVaGo/HJ7ULNzc3FtR4hnQ6rU6no1qtZkEEEHUZ7vl8brKC2WymZrNpgfhwOBhjnE6nT2Q5kjQajcyG0NnzuYHAsUB+MpkYw77dblWr1TQajdRut22uiiQ1Gg0Fg0G1220lEgmdn58bc49EAiYd2UKpVDIGnsCMHCKdTutwONaXUJA8n89t7gqtnYkTrPezZ8/06tUr7fd71et1i0HEJvabhKBerxuojsdj1Wo1ax6Azr5er1tzAdao3+9bLNputxqNRlosFubHrjxovV6bHIwZK7vdcbgpQZ0EmJcd6lxIxpCPUGfAyQRxmD8Mph0Ohye1L4AI811IlJBbSMfkwQUZ//qwy8dIHyN9jPQx0sfIP36M7Ha7/yMO/CwvXLw9SjK5AMHFZcoOh8cCYIp4ARfeImEOkDDwpkkA50geo3a16553LJpFM8zbKp/JhaQiEokYiHG8ydFrOBw2VgDGbbM5tjrle2FPADj3/gASt5NUIBCwWRgE43K5bG/IaLFdtm48HiudTlvgo+AWAKV1qXRktHCkq6srtVotPTw8WFtejlJhXlxmD4ddLBY6Pz/XYrGwWQeAIAwl09QpDgZIKXJMp9MW/Dm2PRwOBsS09U2lUsa+8T10vUH7jx6dQJdKpdRut43tg4UMh49zT0ql0slQSxhAJAIu40XR98XFhbEpFxcXury81MuXL80pA4GABSecG/vieWFzCHTdbtcGFcJIYzPh8LE7E4FpuVwqkUjo008/NaZ6tVqpUqno+vpaNzc36vV6ev78uTFY7MdqtbJ5NgAhCQJ+xXE6bYvZe3TiBKf7+3vV63UL/CRELitG4HSfgWBXqVRUr9f17//+75rNZiaXgXllH5FEjMdjZTIZJZNJDQYD89NGo6HD4aBisah0Om2zSIgRFNhmMhk9e/bMkiykEEhGttutJSPMGSKO0IIYfTj1DySJ+/3eCrUZTOrWdhDckSuwr7CL+P9isVAsFjPQhV2l8JcYQYLCM3IKgQ1RP0X8IVFjb4ghsLGsJdIb5t3AFuNnsH98DkwtwEjsRcrxPvbOv/7ny8dIHyN9jPQx0sfIP36MfC8O/GQkkaxYlIWANePBMUR+hpt1j9kJJpHIcVYIuvJ0Om3ODyvGGyUP6XmeFRput1trqZlIJCwwY9zM45BkAYU3VRga/g0tMtrYfr9/EiD5/lAopEKhYIDC0TCMAuwhx/M8QygUsuLRTCaj3W5nk7q3260BG+sFo0jBM8+52Wz04sULvXv3zoLYYrFQr9ez+QjL5dLYAhga1h/Q5pg5kUjYcbc7V4UOMNPpVJVKxcC40WjYkTNGTmEqoNfr9cxgk8mktXaNx+PG0EiyjlDofmFWo9GodVZCG08rU7fjkMvKhkIh5fN5O1aOx+Pabrf65JNPrF0vwZB7n06nOjs7U61W06tXrwzUCGDFYtFYTQIVDB+aZqQmAEgsFtO7d+8s0FNXQGF7LpczP4L1y+VyBkJISwjOX331lcrlsg0/RF+NTxDwsDUYokAgcNKCdz6fazQaSTrKmfC12Wymfr+vs7Mze/7VamW2xnR7/IR5NZ1OxxIZjvylR40z9wBDxn7B3sOIYdtv375VrVazuohut2t1JMPhUNvtVtVqVdvt1gaawoZOp9MTrXgqldJwODwZqBoOh/Xu3TvTwdO2ORgMms4dW4YZk2TDW0kiXfYabTuJEUCOxGq/39ugSlopS4/1N3QII3Zix9gcyXcsFjO2HX92Y2S5XLYOZCTLFI3ncjmbQ0KjBF4CSLphxrkHEhPu178+7PIx0sdIHyN9jPQx8o8fI1154Q+vn6wPwXExDv7wpsqxvGvoSCFoB+s6Jm/iMAQwFQQQ3rRXq5W9HXM0yKLwuXTQYVFgl1gYtwUkjBhv5TgQulNAkGF2vNlSbMnzwCIAPMhIYAR+//vfW/DFeAjABJJgMGhOQ7BHR0+XJknGkr5588bewul+A/C47CRv4qwnBk2gQrcMIIbDYQsIkUhEtVrNCoslmeOx/8Ph0L4XHTtH6YVCQf1+39gXOlfBmiIP4Kga4IFFwJ4Aj2g0qmazqXg8bqCay+WsRqJUKpkDuV1tXKnHdrtVpVKxYZlIcl69eqV8Pq9nz56pWCyazAHpDdIaSWafMLLUTBQKBauvoICXRAY7RHqRzWZNxw6rFovFrOYAECNYAuSAK1IeAg+fQwtk5EHYNwXZgUBA+XzekgLkO/l8XvV6XfP5XPf39+r1euZH/JxbCyHJ2E0YHthd2Et3lgxANJ1O1el0VCgUTFIEa83/Yg+TycSSwO12a4XR7roTPwiQ1JnQehqmeblcWjE7XaNg2RaLhUku8PVsNmsMNvGNYA97iiwFnbwbeF1tOMkvNg37zP2T4LCX1GPAVKOxJ8klxvDzyEhcNhnWn0Sf56VbG/UNrlzQ/Tz2mHjkXx92+RjpY6SPkT5G+hj5cWDk+1QgP/mFy33T5Kb4QhbJ1Ui6b4guU8ZbZigUsqNLjJ+NZIFgSHAaCuwAEY4GCYrFYtFmGNC2NB6PW8En7S05ZnSDKQXFOE8oFLJuQPl83gpWXd04OniOmt2jTN6ah8OhGSngyZH+bDaz41ECFcenFHi6hrjf79VoNFQoFOwomEJc1o3gPJ/P1e/3NRwO7TMxrnA4bDp51hMNcjAY1O3trQWzaDR6wlC5etblcmkab46ckZjAWBwOB2MUXbnFZrMxUPU8z+Z7kJSMRiMlEgkbYtnv91UsFq3gEyYVppHjb1ePy9E7yQ6SFZKOUOg4/DCVSun8/Fz5fN6kHev12jrmkBTxe7Cfu93OioMJAiQTAFmlUjG2zgXWaDRqkgtYmkAgYIkR7GoikbCkx7Vb6Viv4c4HIdHjHvFJgI92uqvVytquEjhJ1ggwHKkDXuv12nTzMJkAred56vV6xlBtt1urO4hEIsrn85JkGnBYTJhW1oyuS0yvJ8ARe5jzQic0l2WTZHUvSAmi0aglkrScXiwWlnTBzFF4L50OakQCxTO5sgLkTb
B6JK4EbQroAUDiIUAO40e8YQ3wYYAIWye28D3sMTpy6gj4LiQ7rLuQ3MdaAAAgAElEQVQbo3lOEibWDruSZCylf/3hl4+RPkb6GOljpI+RHwdGui9lP7x+FkkhD4pBsdE8NIwdgOMypSyQ9N/fGAkifDYPRVDgD59JgAIE9vu9stmsnjx5ovF4bMwUOmeMmA3mHkkqAEV0nhxh73Y7K3jEgbgPjsNjsZgdVx8Ox5a7MDObzUbD4VBnZ2d2nIoswNXJugwFBsDxOKwULM5XX32lSCSicrmsZrNpMg7acB4Ox0JFgjvPyPfhgASvZDJphc7ZbFa9Xk+bzcaOWvldagFgTWCU6vW63QcMR7VaNZsgIKFXxwFZAyQAu91OjUbD1kiSHSlns1n1+31VKhVjMvh8gjxsSCQSMVkDaxuLHYdPoi9Ht42DvX79Wp7n2bA9WCc+EwYGBhY75TMIYrBWMG3D4VC1Wk29Xs+Yle32OOgSZo6EA3sheJBcUNwpyTTrADOyENi1ZrNpdowfwTrDDAcCAWNmuXcYTu6dQlY38aNoGH8G7AD9Wq1msiG3sBhZiyTTdlNbAgO53W6tpe1qtVK5XLbAmE6nzT5ZF+QIAJabvAJQgAXsPnUOsISwXdgTdSW73c5kELCX0pHRYnZJr9czoMC2WacfAjj7AYCNx2M7hXBPQ5LJpK0VdgVokCCy9/wbBemuvAyfp/B/vz/q74mVbvLtStI4FUG68z72zr/+58vHSB8jfYz0MZL98jHyjxcj33f9LHO4MEAWAmNzgcX9edgaSScPgENKMmaERWMD3DdLPo9jyP1+r36/r4uLC0Wjx0FqGBDtPjeb4zyFTCajyWRi3VLYLDSevOW7nZ/4b4CDDccIABJ6+u92O2UyGWOJkEHk83mbGs90dHfmAwGdz6bbEIDiskwwaJvNRs1mU8+ePbPOTJLMoAKBgHK5nAGL53k2rA1WE2ANBoMqFArWmhbw5bv4GRgNSSe1CAwJBNhhOw6HgwE6LALP4X4GYIO0gC5UABza2c1mY9KQq6srjUYj049TyLvf723/YUSRK/Az2CTyAoIyLKbneSqVSlZTEY1G7f45fmZ93WeipmEymVjyIskCmfvdBC7POxZDj0Yj04kXCgXd3d1JkgVRSdbBiEDgBv58Pm+BlWJwWGBA1fM89ft98zHuCRaRPabDD3UFrCOBkbk4JDlcvV7PNPkEfr6bfQkEAhoOh8ZYAjYu8xyNRlWtVq2FbqVSsUGdrB8yKbdYnqSOPSIJcBl1/BoJBLUK+AS2ht/w7OjDsetoNGrPRj0NCZt7eoGdo20H2NziYRhL/I2A7sZMmEvYWOInrHQgELDYwz3wzMRWPou/I1l3mUB+BukKNuxff/jlY6SPkZKPkT5G+hj5MWDk+0jJn+WFS3rUnrt/5/63eyMuM4MTACq8yaKf5WEJ4K7cgs1dLpcqlUrqdDpqNBoqlUrGyHBMfjgc1Ov1jIFbrVZqNpva7XbGxABUFDjHYjHbbIwSo6FAlLdz97kk2fH34XCw41xa58J08cx3d3fGpMDW4VQuo4gchM8MhUKq1Wq2Xv1+X3/yJ3+iSCRibMVkMtHhcDDmhhaj+/2xRarbAYjfIyAkEgnV63WNRiPrYoTMg/3E0FxWAAckiBwOB5vzstlsTD+8Wh07QBGocESAHRBCYkHrXIAX+0DL7B71TyYTSxzcRMWVdhC0KORlFgMSkO12a7UI3W7XAJdZEewDdklw5DN5Nob4EeAJSG79BQkTOnwSkvF4rFwup1QqZbYPSBCwOF6HCV+tHlscDwYDrddrm7nC52ezWYVCIds/umMhd4B5xvZ4VnwEIMnlcha8kDrN53MVCgVjdxOJhP07rJgb0Fhv9u5weGwEcDgcB6fe399rs9koFovZOlMQTWyggxkgQUvqUCikYrF4ot2nO5erNwdwJRnjS4JDnYZbwIzdMZcEe4Y1JTEFJHnW7XZr7GkoFDI5GPvjSo/wI3yYZ3DjL58Jg4f0iVjF2rtgz/fyu+7piisBkWTJuhvT/esPv3yM9DHSx0gfI32M/Dgw8n3Xz0JX8sUYOn/Hzblvh7x5AiSwVPw7hXwAkau9xQklGbvC98DGjEYjvXr1SqlUyhydQmKKA0OhkAUbOjOlUil7Q5d0coSN4RMEc7mcJpOJGo2GySY4ToUdwBnpXBQIBDQajazL036/19OnT7Xf79Xr9bRer60dJRsMW+eCAUfwFN1SsMqwvFQqpZubG+ue9ObNG7t/2CYABe2+WzgbDAZtcjfgSGErTAoaf6ak49wwKAQX9orj60KhYAABk8lROUHB1XgTPEKhkOmOATXXztDVl8tluw/pkeXADmEgIpGItW6FYaEmAJbP8zwLgi5gwZi5Nk/iARjw7DC4i8XCagoAdMD3h8HlcDjOU0kkEqpUKlZkTeEugWcymZjeWjpKOLBZ/ANwpBsRiRfAtN0eC6Lv7++tmBq5AgwSbCqMVzqdNp/E1lkTgI0kolqtmt7aZYIkWbLEHsBOA1gE+XQ6rUQiYbIBJDDo0rEN1hCWPxAIWOF5JBIxQIdhg+kjEeB5CdhcLlPmJkMULWOHdPmiTbZbDA4TGgqFTGokycAcv+SzXP074O0mrcQkAA0bBVT5HhdIeEZsGd92WUViC98B4HAP/K5/ffjlY6SPkT5G+hjpY+QfP0b+r55w8SAwIe4NuW+l7sXbKAYtyQpc2SAcWZItBP/O5xFkcc5oNGoD45rNpi4uLjQcDhUIBIzx4fiQ76SrDMzT4XCcLO55nnU7oktROHycO9JsNnV3d3cyRI833cPhYB2NCCZsANraYDBoBbmVSkWtVkvD4dAYA1iJwWBgTuWyVi5AT6dTa4lKofR+v7chgchF2A++H0kAQA5zxhE538F8Ba5kMqlisajBYGDHwgRMggaBhjXB8VOplPr9voEI7FYgELDAFwqFjDFDsgLTSOIQi8UM8DKZjLVinU6nJmvA4WC0sBGCpQv82ByFv6yjew+wawAZzgsLyXH2fr+3IMbsjNFopIuLCw0Gg/+/vS8Pk6o6039v7XtVV1XvNN00WxqMTKBZFW0WERTBUdGZmMw8M0GcRY2OCxJjBqNxJo6aZBLzB3mS0SQ6Y3xUxolKVJbEDcUAAiJgoBt6X2rp6qqu7lp/f/Tv/bjdQgPSRIHzPk8/SnfVveeee873nvOeb0EsFkNxcTH6+49mEOMYo2qZy+WELHhkTrU4FAqhuLhYsnzps05R7bFYLOjq6pIAc16T/aRXq1lng5mIaLSTyaTMLf11SfocQww6djgcMBqNogLS752gKsRxZzAYRFnl3Ovr6xPVjgsCji23243W1lZ4PB4AkOtwbnNucV7Q3QaAjGESP8meBJHP52WBwLHBRShtD0lS72rBtnOOer1eGRt0MQmHw4OMMf9LVyMuQLjIADAoaJh2Ue8awXFLktOrxbQ3tB/6e5J0Oc/0C3zOF71t1rul6Z9B4dSgOFJxpOJIxZGKI88PjhwOI1r4mB3JB+Wgo+pAI8UXwt0hXRS4U6R/rN5o8iiTH
aUnFKfTKYpWdXU18vk8YrEYcrmcHBvTDz0SiYifNo0RDQ3Bf/NFJZNJ+X1jYyMOHDggR9h8eXSloP8vfY6NRiO6u7vlCJWBlX19fWhqakJHRwfa2tpkcNFVgxOZxovHouwTqlEMuqRC1dPTgylTpiCVSqG1tVWUOioAHHgckDx2526d1+U7CofDotoAEGPS1tY2aMJz4cDPABDF0Gg0SgAr3zX7x+/3y/31R8w0iFRsqU7GYjG4XC5RhmgYaDj4Of2k5/EzXQYYZ0AVzGAwiM86j8OpxtHg8D3kcjlRYemSozcqHo8HXq9Xsv8Eg0GZE4WFheju7hbiYKYfEjKNDJUhvl+mg6ViXFhYKCppPp+XY3e6zZBY+U5J7FwcOJ1Oqd1B5VHTNMRiMSSTSXGpcLlcopqSdEmUzPAEQPqexEh1lM/A8Ua1k6RrMBgQjUbFRpAEuJijCwRJkIvPRCIhxVH1BEmFFIAQAReAzc3NMkdpeGlv+vr6UFZWBr/fj+7ubhnXVFip8pE0+Qxut1vaT/tA9wwSBdVEqpRUhvVByLSFJFT9s3IhQyLjHKMrCV2s+Nz5fF4URPYLbSPVVc5PvXLHucc2c8yynRxrw6l3CseH4kjFkYojFUcqjjz3OXJYHjhJvhgW2WxWJicfWO96om8wj+uofpAc6LvNyczr8BhZryjpXRj4wKzoHgwGpZiZ0+mUnT2PLbPZrKgevDc7kgOaPshUPGw2GyoqKhCJRLB//374fD45mqaCw6NoAOKrygBRGhn2VTKZRGtrK8aOHYuOjg60traisLBQ+ouGWR+ozuNoPVHRf7miokIGeSKRgMfjQVNTkyyQUqmU9InX6xVSofHis1OtotHXp+9lulWz2YyGhgZ0dnYODKD/ryxQVdArCkajUe6XTqelzgfdIKg8sWI77wtAXE5o+Elw0WgU3d3dGDt2rAxuk8kkahmJTV+7hYuQoUbV7XZLlXKqNSRfXjsUCiGbzQqpcdzyOtFoFD6fD5lMRgjHbrfLIoaGkQsL+jnTTUc/rjmOSIT9/QN1Qnw+36DnonJK/3kuAKjIMaOS3s2B7gh0U2HRUwZ3k3xcLpf4+WuaJvENzITEe/C90/AARzNBsago/0aXCcaK0N2AY4oLSKrMXBzwndIu+Hw+KfbKucGgcfYjjT8Xo2wrXU8cDoeMfYPBIGREe8V3TOPucDjkmUnoehcGEjmfh+4v9IMnMbBtNOYkWADyPFyY0NhzLnGesl+ohA61hZwjHCMcyxxrVF/1qrpeWdQTHwmfNsvlckn7FU4diiMVRyqOVBypOPLc58jhcNobLj4wd/g0SnrwwfkCuNvl72iI+CL4wPpjPP2OlrtnAOIrTVWCfrnBYBBOpxOffPIJTCaTGBfuiPVH+wCEvDRNk0nMCV1RUYFkMonDhw9L7RAeTxsMBhlQrGXBIFiXy4VQKASDwSCBkjyKTyaT8Pv9iMVisFgsCAQC4iPPyZ9IJAZNMh7du91u9Pb2CqHQHaG1tVUMQUtLCyKRCILBoBhKAFKHhHUhOGk4kO12uxgYHqfSsDB4WNM0uY6e4D0ej7gUaJom2aVaW1vR19cnPuEMjOUEZB0Lt9stim5PT4+MHQZcdnZ2wm63IxAIIBaLIZ/PS0pYAJLmkwaZz5PP50XpoxHSK6I0CvpjZgDiomA2m6UODd83JyoNKI1ELBaD3++XSvMcy8yuRFJgnAIJiOou4xTYPwxC5nixWCw4cOAAxo0bJy4jzMhENYsBv1x8JJNJ6U8e6zc2NiIQCIhhaW9vRz6fFxcYvlsaKCpNdGdgTEMsFpNMP5lMRvzXA4EAQqGQjFWqYtlsVt4Jr693xaAy6Ha70dbWJv7lHo8HHR0dQiB6YmPbuBClneCxfygUQlFRkfQTyYnzn0H/vD9VZ96LixCOf96vra1NVEOr1Yru7m4hb45dvSJNm0G/dMaE0J2FpAoMqNBc3OkXMHT74L8Z2Ez7y0U9+7O3t1fayMUClWX60JOIqXZyEaI/PaGNVi6Fpw7FkYojFUcqjlQcqTjytDdc3OFxR8oJo5/Q/Lve+Oh3iHzxnOA8YuVxN9UwAPLAdMEwGAxipMxmM3p6elBQUAC/34/6+nr09vaioKBAiIJtZGfrXQf0Rom/pzLT1NSEgoICIUQOAP33jEYjiouLkU6n0d7eLultTaaBWgwGg0GKMjLIs6OjAyaTSQKYacj5O/rP610a+FKpsOmLJFKpbGpqkkHvcDhQXFwMl8sFn8+HlpYW9Pb2Shpbs3kgeJUB1DxizWaz4uaQTqeFJBnUSJWRAaZUOuh3rGmaGHwe+8bjcSGMTGaghkRhYSG6uroGqRs88o1EIjAajXA4HLBareIKYbPZ5JifRi4ajYrCQYNCA5ZOp+UInePRYDBINi19ACdwNJUzx47dbhcS4JF7JBIRw0GDQlcD/UQ0GAxieGlYaAD6+/vlexyHDFYdNWqUkC37hsGsVqsVFRUVQo5sH40E3S7ommOxWCS4mAseBndT4aWiSdXJZrNJNh+qhTQysVhsUGCspmlob28Xu2CxWERJ58KDhioWi4lLEhcKmUwGHo9HFlFFRUUynvQuHHRN0rtk6RVQBgsz3qOwsBAmk0nqo0Sj0UH2IJcbCJi2Wq0yBhhrwPEQjUZlUUPbQdvG0wAWGtW7YGQymUGKMk8DeCJA0uI74++6u7tFsafdZP9TpeV/AcgY4CkIlUYSJV0w2J96dxQ+D+cF3xPtOgC5Hm2DwqlBcaTiSMWRiiMVR54fHHlGN1xsAJUMviA+qN71QX/MSqNFBYrgRLRYLIN8JUk49IXn76kAsDp9Op3GqFGjYDab0djYiJKSEgns1bSBLEhUongdTjwqhlQHGbBbX18v6TR5L96bCl4mk0E4HIbX6xXDGYvFUFVVJek/eUydSqUwYcIEuN1u7N+/X8iSGXUYUDyUNKl0RKNR2dlTOQkGg0IoLAjo9XolkJZparu7u9Hb2ysGjeoUFTqqAHTz4N+prFHZpNLAI3VOQL0LCY+m9eREtYnuAVTq+DxcHLB9BoNB/PtJIlyIZDIZhEIhIWVOKqbyBSBB1CQWqnR023A6ndJOEgMAUUdpWPUTyW63o7m5GfF4HKWlpWLoNE1DIBAAMBD8HYlEEAqF5Djf7/cLCdlsNlEgOf71xOR0OmUBwraYzWYkEglMmDBhkHuJ0WgUZYqGls9rsQzUV6GKTLUbGDCUDEoOBoPwer3i9x4IBMT3uru7W+aK0WiUtNFMCU1yKCoqEhJqamqSxQndk+gOQuLt6uqShSNVKhI8a5JQeY7FYmJYaRtSqZSkV6bhdjqd6O3tlRgUALKgiEQiovTT955KVzablVgNzmmSQjKZlLTLVOFogDluGXfA0wuq6nw+2jaORS7MuAgjUVBlBiDvlYocYyBo3GmnNE2T/rVYLGLvuBD1eDzSR1TsueAn0VCp5+9pb0lw9FNX+GxQHKk4UnGk4kjFkec3R45IlkKSh16B07s0ABikzPHIX08y7FgOQKoCdEMgWbGTObBoQGlo9LUAGPTLoz4aNk7C
oW4IdKnIZgcqx5OgPB6PqE5Mq6k/tqQaRUPlcDgQDAZlx+xwOFBZWYk//elPGDVqFPx+Pw4fPiwBegw0Zj8AEAPV29srLhp2ux1er1d8z3O5HAoLC2WX7/F4hBRpWPk8HR0dSKfTCAaDYlCMRqOof5xkNLJUaegy0NPTA4tloGBlOByW90bw/XCipVIpdHd3izGju4beDYZqC5+XihxJju43VF8YaMp3ro9dKCgokMnS29srxQqdTuegMeBwONDb2yspSvnOeA8eiVMl4335vHRHSaVSsmgh+erHYSqVQkFBgag1+v6Ox+ODAr3j8bgULNQf9bN+Bw0cYx5I/CQwqrterxeRSETefTKZRDqdlud3OBwyJrhIocEKh8O44IILZN75/X40NDTA6XQiGAxKxjBgwChSAeL/02BzrhcXF6OxsVHcJdi3LM5JdxwuMkh+drsd4XAYyWRSshgZDAYUFRWhsbFR7s9FD79PhZiuWVTrgsGgKOL0x2cK6lwuh0AgAKPRKOoxF4305+d4Zh9T7aNNonFPJpPo7u6Wtgx9No4huopxgUJ/c7pR6W0ixxkXX3QzIYnRVvKdklwJuntwvtHW6BcvfGd03+CYpLIHDCxQONbYfoWTh+JIxZGA4kjFkYojzweOHA4j4lKody/QH6txR07Dyx0xlRdOED4MFRsaaE4YHqFzx0wlg2TA66dSKbS3t8tOk5PRaDQiGAwiEomIsaf6QP/bdDotaT7p0kB3Abox0GByR03FymKxCPlw4ND3nMfyXq8XNttANfqysjLs2bMH/f39QlAkX7vdLll7mC7TaBwIrK2pqUFLS4sMFE3T0NHRIZXk6YLBCUiCDIVC0DQNZWVlsNvt6OzsFEPL98VjXY/HIxmG6CfNNhgMBpnoJCmm/dVnaGEQNtUiGlAGVfN90ZDS4Hd1dQGAZEPipKKqSENCdxIuHpgeVq+E8dn4fb5f+hdzIcL36vF4kMlkEIvF0NvbKy4hHIfA0QJ68Xgcfr9fxg0NFccqDRXHDwBEIhEcPHgQNptN3CSKiopkbNBtgUaEKgvdKJgxSdMGAmrpC81Ad5fLhVgsJu5CenVb34clJSWw2+1obGwUoxoMBsUlIRAISIYnujlUVlYiFotJtXsaGbaXRpdjNhKJYMKECaLCMZ0uXW4YKM96LnQXiUQi8Pv9iEajsFqtqKyslPkFQPqZ/tXRaHSQ+4JeaaU96OjoQElJyaAFLg0v7U5LS4sofVxIcNFDNxW2l5sOqnNcgJEouajgvKebjtlslkxOvAZPJTif9H7oNPK0DZnM0QKYdMPhe6XdpfsSg6+pPAKQuAP2D+0e3SL0xMa/sb0Gg0HmOduhcPJQHKk4UnGk4kjFkecHRw6HETnhYnYhNo43pdHm7pHKAI9zSTS8DicnH8BoNMrgNRgMg47K6UphNpsxevRoCTzlzhsASkpKZGfN63PHSgNhNpulo/jy6eOZzWYRDAaFmEgwVAb5vHwJJCgaX6oW/C4H2JEjR6RYXXl5OUKhEIxGI8aOHYu+vj50dXXJIOJEYkYf+hXTb17TNBw+fBgVFRUoKiqSjDWBQAB9fX2iYjGYlEoGB2EuN5BFKB6PS2V1/aQkOBipitBXl8addU7oYsGJz75n9iUOdA7Wjo4OUf1cLpcQC5UcTiAOcC466OPP/u/s7ERRURFCoZAojvQfZ0FEZvyhomgymSRTEOMYGHzKdtOw0V2Dbi9UQTneOeY5Gfl7TdNkQo8ePVrcVeifzkxUVAtzuYE0zSS94uJiZLNZUQG7u7sRCASEROjTTb9uGjoSWTabFZXzyJEjyGQyaGxsFII1mUwIBoM4fPiwZIzinKioqJCUx2VlZTCZTJIRqr29XRaLnB/hcFjGfyKRgNvtFneRZDIJr9c7KIaFBp5qI6/J7Gh0iXA6nejq6oKmaRJA73K5UFRUJP77JMN0Oi1jk+p5S0sLSkpKpF6LnoB4QkCbwZgQjmO+Wy6m0uk02traxKgXFBSI7UulBjJ9MZscA/ypkNPYk9D47CQHqo/0+6d7Be8NQEhP71bFBT2VX9pUxhdwAaJPU0yCyuePZnwigZFMLRaLjGt+l/dXOHkojlQcqThScaTiyPODI9mOY2FETrh49Ks/LmWwLTCwy+dRInfFVHroG0rC0TRNdo36z/C6BoNBBqR+EUBDyHSb7Kyuri7U19fLYO7q6hrkD55MJhGJRGRw2Gw2MRj5fB6RSATjxo1DPp9HS0uLqDrcdfMaDQ0NKCwsRE1NDbLZrBST5GSiUkjyjcfj6OzshNVqRUFBgbShublZVD/6clPJiEaj0jaXyyXH/u3t7YjH4ygqKkJbW5v4vtKAZzIZ9PT0oKKiQgYYYwTcbjd6enqQSCSQTCbR3Nw8iOzpegFACB2ABIpS/SEpsR/j8bgMeA5AxgtQMaTbhc/nkwnHoFS61uTzA/ViUqmUZL4Kh8NSG8ZisYgi2N3dDa/XK8+r973leLJaB+phsF1UuDRNkzS+Ho8H+Xx+UBB3QUEBwuEwUqmUkAoXFuwfAKIS8nibvs0AxDBTvWTwKlVFs9ksCwCOb2BAUWJhwmAwKK4iudxAgCn7mnOOClkymURXVxeKi4uF0OhWwcVER0fHoMruDKqne47RaERHRwc8Ho/UwgkGgwAg/twMZqU6ZjAYpO+oMPL5qbjq3XLi8bgc5VN9DIVCiMViKCkpgdPplAKlBQUFEiuhL1jJRUIymURLSws0TZOFk9VqFdWbBp8+5Vw00deebdGrdsBRlwH9uLbb7UgkEhKQ7/P5RN3ngognDVRXqTzSzYexByRYo3EgroUnBZlMRjKWcSFHO0gFkmOSsQyMA+CCQh9IThvNGBO6b+hdOfRjWp9KW/95hZOH4kjFkYojFUcqjjw/OFLvRjwUmt6H/LOgsrIyv2bNmkFHgPzhYOKDUYU71rGbnmj0fuP016SSxPZyR8odMR+aHctBRMPKbD083qSySEWRR+E0+vRHNZkGCg9y8FosFvG/5n17e3sRjUbhcrng9/sHGeBMZiANqNlsHhQY2dPTI6RrNBql/kFvb68YP6o5LpcLpaWlSCaTUpySWXEMhoEsTDabDaNHj0ZLSwvGjx+PtrY2IVoOGr/fD7vdjmg0KsqL1WoV1waqayRs/QIhm82KQql/J/pgYRpF4KiSy/dts9kkswyJgP/Wq6A0SLwfB3A+n5caFfn80exOnKRG40BtBbaf7zOXO1q/Q99n/KHxo2uO3i+eR+fM6sTJzTHHY2q2h+OFSrXRaBT/bR6Ns5/1ag2JjosvLjqo8HDik4hk8mpH/Y6pxgGQNK10MWFQJ/tB78rE5+eijsG5XCQxuJSuDel0Woy2/jm4eKOSpo9P4Xii8aPaT7cQ+j739vaKewjnH0mTY4WuNOl0GrFYTJ6P96HqBhwNsCah0aecizW+T44dziW2k4sMuhNRMeY1+NxUH9n3emWci1IuYuhzPtRXne+C9lLvMkE1Ta/0cbzy32yvfm5omiYxNjwpoU3WtyGdTsuCj/dlnAvHC/+Wy+XwjW9844/5fL7
2ZPhBQXGk4kjFkYojFUeeLxy5du1aNDQ0HDOY67Q3XLW1tflt27YdvaB21Jf3VKBX9BQUFBQUvpgwGAxqw3UKUBypoKCgcH5g+vTp+OCDD4654Tptl0IqLVQxuAvUqxp66P9NBYi7T72CNxI4VYLiZ/XPoW/zsZ5n6HdHqu36a57oukPbdTYQ8/HaOPQ5h35mJPv3z4HhxsypXAM48RjQQ9+/n/X+Q8ff8eb0Z4X+WiczFo7XvuONmZNp58nOkxMtks/E/B/p6+uVvRNdayTf8/kOxZGDv6s48uSgOPLUrgEojjxe+xRHnvw1Tpcjh+uvEUmaQforhpQAABkySURBVMMbiUSwa9cu5PMDmYSqq6vh8/nQ2dmJ+vp6lJeXo6KiYlAcQDgcxscffwwAGDduHEpLS4/7kCdjJId+5lhkdrxr0F0AOHrseaxrHOv7PHo/FZxJw3g2GN2TaePZ8BzDYSTa/1n76XTvPdSA6slEf20e03MhONyCS/8zNFZF/zn9onK4+TfUMOrbeqy2HMtI0i1C/3Oy4P30ftv6hejQPhvu2voAfP0z6f92sm3kQp2fZTv019XbboUzB8WRA1AceepQHDly11AcqThyaLvOBEcOd98RKXzMRn788cf42te+BgDwer2YNWsWVq1ahX379uHf/u3fsHLlStx8882SjrS7uxvf/e538fLLL8NoNKKmpgaPP/44xo8fP8gf/VgDQv+C9INz6GAFBr+IoW3Wo6urC5s3b4bVasW8efOk7sOxnvVYbdD/fqiKqYf+O2e7sVQ4t8GxyyDrZDIJl8slWY6y2YEq8qzzQl/x482bdDotgby8TiZztMghv2e1WiWm5FjIZo9Wqrfb7YN8+xmDwsxnvAb9xZlNi+2JRCISKK0Pjj6ZvtG0AV//rq4uZLNZSV/MNuqDw/VFQ4cuePULWf6NwcTZbFaKkzIOZzhVmzZHH0/T398v2ZhIIgyoH/p9hZGH4kjFkQrnJhRHDt83iiOPYkQ2XEQ2m0UoFMLUqVMxc+ZMPP/888jn8/jSl74kGXa4qzQYDHj++efxzDPP4LrrrsO4ceOwZs0a1NTU4JFHHpEsPqFQCGazGUVFRZK1hLUUfD4f3G43Ojs7JT2owWBAc3MzNE1DeXk5+vv70dbWhnw+j+LiYkkn2dnZKVXr8/mBYMwdO3bg0UcflcDeqVOnSu0HPfL5gcxMDOItLi6WQRUOh6U+g81mQ1dX16Dg20QigWAwKLUnFKEofFGhJ4Bdu3bhlVdeQVNTEyZOnIjrr78excXFePfdd/Hyyy8jkUjgy1/+Mq6//noUFBQMGte8Tn9/P3bs2IFXX30VHR0dmDJlCpYtW4ZIJILXX39dMjWlUinU1tZiyZIl0hb9gi2bzWL//v3YvHmz3HfOnDnQNA2bNm3CW2+9hd7eXixYsABLliyRFMgdHR14/fXXsWzZMhQWFiKZTGLTpk149dVXYTAYMGPGDCxfvlzIYLh5ybakUils2bIF69evRzabxZQpU/DVr34VdrsdH330Ef7whz8glUrhK1/5CmbNmiXXHopwOIzXXnttUErwCy64ABdeeCE++OADvPnmmzAYDKitrcXMmTM/lZ59qCtEe3s7nnzySUyfPh3z5s3Dm2++iXfffVeKmFqtVlx66aWYNm2askF/RiiOVBypcO5AcaTiyFPBiG64mNVm3LhxuP322xGJRFBfX49gMCi1Rvi5/v5+vP766zAajVi9ejWKiorw3HPP4cCBA8jlcmhtbcUvf/lL7NmzBy6XC0uWLMGiRYuwb98+/O///i8aGhpwwQUXYMWKFdi4cSN2796NO++8E1arFQ8//DDGjRuHm266CS+88ALeeustZDIZXHzxxbj22mvR0dGBxx9/HIWFhYjH4yguLkZhYSFee+01HD58GFarFY8++ijuvvtuzJ49WzKeEAcPHsQvf/lLfPLJJygtLcXVV1+N6dOnY8eOHXj++efR1dWFWbNmYerUqdiwYQMOHz4MTdNQUFCA1tZWXHbZZbjxxhsHFQxUUPgiIp/Po6GhAT/5yU9w6NAhjBo1Cj//+c/h8Xgwf/583H///bBYLKipqcG3v/1tJJNJ3HHHHRJrQuRyORw8eBA/+MEP0NLSgrKyMvziF7+Aw+FAdXU1WltbkUgk0NzcjA0bNuD222/HkiVLJH5Fr463trbi4Ycfxu7du1FUVIT/+7//wx133AGj0YhHH30UpaWliMfj+P73v4+qqipMmDABmzdvxquvvopXXnkF8+bNQzAYxM6dO7FmzRqUlpYiGAzihz/8IXw+H6688soTGlgq9Hv27MHtt9+OsrIyjBkzBv/+7/8Oj8eDWbNm4cEHH8Thw4fhdruxYcMGrFmzBvPnz0dvby/279+PYDCI8vJymM1mHDlyBN/61rcwbdo0FBUVSQ0Zm82GNWvWSJrr1157DQ888ABmzZol9rW4uBilpaWS3SuXy2HdunV46KGHcOedd2LBggWIRqNobm6GyWTC3r17UV9fj7KyMiEThT8PFEcqjlQ4t6A48vj9ojhyMEZ0w8VjukwmA4/HgwkTJmD37t3o6ur6lN9oIpFAU1OTFFoEgNtuuw1erxfpdBpPPPEEfvazn6Gurg7bt2+XQojPPfcc3nvvPVRUVOBXv/qVHEW++OKLmDt3Lux2O55++mn88Ic/xBtvvIF7770XXq8XALBx40aUl5cjGAziN7/5DcxmMyZNmoS6ujoYDANV50mI+mNWvV9nf38/HnnkEfz2t79FXV0d3n77bTQ2NuKuu+7Cj370I+zduxdVVVX4r//6L+zZswcffPABuru7paBgOp3G7t27sWTJEhQXF49k9ysojBj083X37t3Yt28fVq5ciauvvhr/+I//iN///veoqqrCoUOH8NBDD+GrX/0qNm/ejF//+te44447PmWIqQDu378fd955Jy6++GKsXr0a7733HhYsWIAHH3wQZrMZP//5z/Huu+9i/vz5x/XD7ujowPbt2/G1r30NM2bMwL/+67/iD3/4A7LZLKxWK9asWYNcLoe/+Zu/wdtvv42qqio0NjZi586dUoQ0k8lg/fr1SCQS+NGPfoRcLodVq1bhjTfewKJFi2A2m4clFJ5APPfcc2hvb8ezzz6LiooKfPTRR1i/fj3Kyspw4MABrFq1CqNHj8ZDDz2E7du345JLLkFnZyd+8YtfYM6cObjmmmvEJcJgMGD58uXw+/0IBAKoqanB1q1bYbfbce+99yKTyeDee+/F1q1bMXv2bDQ0NODJJ5/EZZddhssvv1xS2r7zzjt48sknB6Vg/su//Etce+216O3txdq1a5FMJjFt2jQAyp3wzwnFkYojFc4NKI5UHHmqOCMR0/o89Xqy0Af56StD099yy5YtePfdd9HX14dnnnkGU6ZMwWOPPYa77roLEyZMwJ49e7Bz504sWrQI3/3ud1FdXY0NGzZg/PjxGD16NF566SX8z//8DwoLC7F06VL8/ve/RzgcRllZmbg07NixAwaDAU6nEwsXLsQjjzyCm266Cddffz3+6Z/+CWPGjM
HEiRPxne98B7W1tYMUNpLghg0bcNFFF+Gxxx7DP/zDP6C0tBTvv/8+3n77bSxYsADf+973MHbsWOzevRuJRAKLFy/G5MmTMXHiRFRUVKCxsRHxeFwtdBS+8Mjn81JksrS0FD6fD5MmTUJ9fT3S6TRGjx6Nd955B8888wySySQWL16MdDqN/v7+QT/JZFKKmFZVVaGoqAhVVVVoaGhAV1eX1OrYsGEDysvL8ZWvfEXqceivk0qlUFJSgrvvvhtLly6VehwOhwPhcBjl5eUoKChAdXU1ysrKsG3bNpjNZqxcuRJz586VehpUE/lZr9eLmpoaHDlyBKFQSJ79RNi/fz8CgQCqqqpgtVoxZ84cHDp0CFarFatXr8bChQulbo3P5xMVf8mSJfjyl78sfvKHDh1CPB7Hli1b8Oyzz2LdunUIhUK48MILcd9996GyshI7d+6E2+3GqFGjAAAlJSVYvHgxJk6cKCcMkUgE//Ef/4GioiI4HA6pn8TaIqFQCG+++SYmTZqEyspKAGrD9XlAcaTiSIVzA4ojh4fiyKMY0RMuYIAw+vr6sGPHDmzduhV+vx8lJSVSMK2lpQUWiwU2mw3jx4/H3r178cknn8Dj8eCpp57CFVdcAaPRKGTECtChUAjl5eVydJrJZKSIXFFREWpra/Hiiy+ip6cH1157Lfx+vxR3mz9/PsxmM5588klEIhHk8wMFAufMmYPp06dLZzudTqlw3t/fj6amJhQXF+PAgQPwer0YP368PCcnTDgcRmNjoxT4Y+Vt7vwNBoMUUzQYBqpcUyFUUPiiY6iCZjKZUFJSgnA4LIZx586d6OnpQSwWw+zZs/H8889j165dsNlsoujb7Xa0tbVJsUaHwwG/34/u7m4kEgkYjUY0NjZi165dWLp0KSwWC3784x8jHo8PytjmdDpxww034Otf/zr27t2LX/3qVwgGg5g3bx6efvppyTrk9Xrh9/vR1NSEXC4Hm80mxUOHPhf9tQOBAA4ePIienh6UlJScsF8ADCqoaDAYMGrUKEQiETidTvz1X/81tm/fjmeffRZVVVWYM2cOTCYTfD4frrrqKvkeMFCA8pJLLsGiRYvQ19eHW265BbW1tbjlllsQCATwu9/9DuvXr0dRUREmTZqEfD6P0tJSlJWVyXWy2Sz++7//GwcPHsSDDz6Im2++eVA6cQDYu3cvWltbcfPNN4vNUrbozwvFkYojFc4dKI48fr8AiiP1GPGkGalUCu+//z6am5sRjUaxcuVKGAwGJBIJvPTSS9i9ezeAgeJgV199Nd566y3cf//9sNvtcLlc+Nu//VtYLBbceOONWLduHb75zW8iGo3C6/Vi6tSpqK+vx6uvvoqdO3eiqakJ11xzDUaPHo3LLrsMzz33HJLJJK644goYDAbMnz8fL7zwAt555x3Y7XYkEgnU1tYin89LlWg9SkpKUFVVhffeew/3338/ZsyYgauuugrf/va3UV1djSeeeAJOpxNLlizBiy++iNtuuw1/+tOfUFVVhYULF+Lw4cPYtGkTDhw4gEgkgosuugjbtm1DKpVCX1+fqBishq2gcDZAr2JlMhm0tLSgtLQUH374Ierr6/Gd73wHY8aMQX19PZ566in8/d//PUpKSqSSPDBgLMPhMICjKngoFILf75fg+K1bt6KnpwcLFiyA2WxGMBiE0+kcNE/tdjuMRiP27duH73//+wiFQviXf/kXTJw4UYgEGEi/HQqFUFlZCZPJ9KnnYGICkkpfXx86OzsRCATg8XiknScCr0GiOnLkCAKBAAoKCrBjxw5JbnDbbbdh/Pjx0DRNskcZjUZYrVbkcjmMGTMGd911F2bMmIHW1lZYrVa0traiq6sLPT09Eouzfv167NixA5MmTUImk0EqlRL3rra2Njz11FOw2Wyor69Hf38/tm/fjjfeeAN1dXVIp9PYvHkzHA4H5syZczpDQuEzQnGk4kiFcw+KI48PxZFHMaIbroKCAixevBg2mw1lZWX4i7/4CyxevBg7d+7EvHnzxEWAO82ZM2fi4Ycfxvr16xGNRvHAAw9g4cKFMBqNuOWWW+D1evHhhx9i8uTJWLp0KaZPnw6/34/CwkIcOXIEV111FZYuXYpAIIBZs2bhuuuuQyqVwsUXXwxN03D55Zfjsccew8aNG5FOp3Hvvfdi0aJFiEaj+PrXv47JkycP2sUHg0HcfPPNCAQCaG9vR01NDdxuNyZPnoyKigrk83lYLBasXr0aFRUV2LNnDy6//HKsWLEC06ZNQyAQwEsvvYQjR45gxYoVmDFjBqqqqjB27FiUlJQgk8kgHo+jq6vruJlYFBS+SNA0DWVlZTCZTGhra0M8HsfHH3+MUaNGIZFIIBKJYMyYMZgxYwasVit27dqFefPmYe7cuYOuk8vl8MYbb2DTpk04dOgQRo8ejSNHjqCiogJ+vx/JZBIbN25EcXExJk2aBJvNhhtuuOGYqV1jsRieeOIJ9PT04Fvf+hZmzpyJXC6HiooKvP/++wiFQgiFQmhtbcWyZctgNptFSaf6bzQaMW3aNGzfvh2HDh1CNpvFxx9/jBkzZiAQCJx0/9TW1mLjxo1oaGgQ15HKykpkMhk88cQTMBqNuPvuuzF16lQhtebmZvznf/4npk+fjuXLl8NsNuPHP/6x2ERmcbPb7Vi3bh3279+Pxx9/HPPmzcOLL76IhoYGaJqGjz76CE8//TTq6uqwcOFC5HI51NTUoK+vD9u2bUM+n0dTUxMOHTqEhQsXIhwOY8uWLaipqVHuhJ8TFEcqjlQ4t6A4cngojjyKEdlwsUETJkzAD37wAxiNRlHj7HY7Zs+ejYkTJyKbzUpaRofDgWAwiGXLlmHGjBlIp9MoLi4WI1teXo5//ud/lrSyfr8fRqMRU6ZMQVVVFZLJpOTz1zQNfr8fq1evBgAEAgFomga3243rrrsOdXV1yGaz8Pv9cDqdcDgc+OY3v/mpegIGgwEzZ87EuHHjkEqlEAgEYLFYcM8990iAsKZpqK6uxq233opoNAqbzYZgMAij0YipU6di7Nix6O3thdfrhd1uR2VlJSwWi/iJ5nI5ZDIZGbBqwaPwRYQ+feqFF16IyZMnY926dXjllVfw4Ycf4r777sP48ePx7LPP4p577sHkyZPx4Ycf4oYbboDNZhO/cSKfz2PKlCmYPHkyfvrTn+KFF15Ac3MzrrzySgQCAezbtw9//OMfMXfuXBQWFoqN0JMJ586mTZuwfv16TJw4Ea+//jp+97vf4aKLLkJdXR3eeecdrF27FrFYDHa7XRaW+XweyWQSiUQCwICbw/Lly/Gb3/wG3/jGN1BaWoq+vj4sWrRI4mWGcyPgvF2xYgV+/etf46abbsKECRPQ2NiIW2+9FVu2bMFrr72GKVOm4Le//S1efvll1NXVYe7cubBYLKiqqkJhYaEQ3JQpU3DffffB7XZj//798Hq9WLBgAT766CM8/vjj8Pl8AIB4PC7+6U6nE2PGjIHf74fBYEBJSQkeeOABpNNpOS25+OKLsWzZMuTzeWzduhVHjhzB3/3d38Futyt3wj8jF
EcqjlQ4t6A4UnHkqcK4du3a07rAunXr1q5atQqapsFkMsHr9cLj8cDpdMqu2WKxwOv1wufzyY/b7RY/bY/Hg4KCAvk8f6xWq1yLD81iZG63+1OV7vXF5giTyQSn0wmv1ytpd00mE2w2G8xm86cqf9NP3ePxSPE3h8Mh2ZkIq9UKt9sNp9MpPqA8tmTbeBxKf3yr1Qq73S6F2RSRKHzRoWkanE4nqqqqoGkDdXSWLl2KZcuWYdy4cfjSl76E/v5+9PX14corr8Stt94qxVCH/rhcLlRXV4sv9/Lly3HFFVegoKAA4XAY8XgcK1aswJgxYwbNS/38zOVyqK+vh9FoRElJCVKpFFKpFMrKynDRRRehsrIS8XgcPp8PK1euxKxZs2AymWA0GhGNRuHz+bB48WKxCTU1NYhGo/B4PPirv/orXH755YPs0Ing8/kwZcoUdHZ2wmg04oYbbsBVV12FpqYmmM1mFBYWSjBzVVUVqqur4Xa7UVNTI0HEAFBTU4OysjK0tbWhpKQE99xzD2bPno3KykqYzWbs2bNHYm+uueYaOBwOeDwe1NTUYNSoUbBYLDCZTHC5XLIIfv/993HppZeirq4OACQ+4MYbb0QgEPjMtUUeeOCB1rVr16475S+ep1AcqThS4dyF4sjhcb5x5M9+9jOsWrXqgWOOldPNL19bW5vftm0bgGNnLNGrACeL431nuN8Pvf/JXmNoZ56orafathNdS0Hhiwx9IcVkMolUKgWbzSYV7rPZLBKJhKSbdTgcgxZ4Q6/DIP5MJjPoOplMBolEAg6HY1C1+WPN7WQyib6+vkG/47V4/VwuB5fLJWlgAaC3txd9fX3w+XyD6nH09PSIWqhfoJ5s3+RyOcRiMUm/zar1/f39g9pot9tht9vlO3rCYjpsJgzgYhsAEomEqI4ulwtOp1Ouqb+O3gblcjl0dnbC6XTC6XRC0zSJj3G73aJQfhYbZDAY/pjP52tP+YvnKRRHDv/7E11LQeGLDMWRJ+6b84kjp0+fjg8++OCYXxrRDZeCgsK5B72B0mcW49/0gbj6tNbHuo7+Z+hnhxrGk7kGwWuxPfrf6ZU/+qbra6gMve+pGFi2Y2gfHK+N/NuxwPbp267vF97veC4OJ7MwZpHM04HacJ0aFEcqKJzbUBw5fN+cTxw53IZrxNPCKygonFugkR5qxGi8TtY4HUvVG+76w13nWAb/RO3Rf4/31DRt0Oc/i6I+9BrDtXG4exyrf4f2y/H68GShYrYUFBQURhaKI0/cHsWRasOloKBwEjhVw3imrnOizw3392OR2OliJF2ehiPIkbqegoKCgsLIQ3HkZ2vP6V7rbOJIJXcqKCgoKCgoKCgoKCicIZx2DJemaZ0ADo9McxQUFBQUvuCozOfzhZ93I84WKI5UUFBQOG9wXH487Q2XgoKCgoKCgoKCgoKCwrGhXAoVFBQUFBQUFBQUFBTOENSGS0FBQUFBQUFBQUFB4QxBbbgUFBQUFBQUFBQUFBTOENSGS0FBQUFBQUFBQUFB4QxBbbgUFBQUFBQUFBQUFBTOENSGS0FBQUFBQUFBQUFB4QzhtDdcmqbNGomGfF7QNG2mpmnFn3c7Pgs0TZt1Frf9Ek3TCj7vdnxWaJo2+yzu+0vP8r6fe7b2PXD297/CqeFs5kjFj58fzmaOPJv5ETj7bfTZzJFne98PB1WHS0FBQUFBQUFBQUFB4QxBuRQqKCgoKCgoKCgoKCicIagNl4KCgoKCgoKCgoKCwhmC2nApKCgoKCgoKCgoKCicIagNl4KCgoKCgoKCgoKCwhmC2nApKCgoKCgoKCgoKCicIfw/kWFA/Wu0KJ0AAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA1wAAAFDCAYAAAAu+g+jAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOx9ebxkV1X1OvXqdaczJyQkBAIhJjIqIjP4McmMCCICn8woyCwKCEKYEUGRWWSIgIg4MAUhIHyCgAhBGQVMMAlJCEkgA+mETne/96rqfn/c2vVWrbfOrXrpF+hh7186r+rec/bZe5+99zp333NvlaZpkJSUlJSUlJSUlJSUlLTx1PtZC5CUlJSUlJSUlJSUlLS3Ul5wJSUlJSUlJSUlJSUlXUOUF1xJSUlJSUlJSUlJSUnXEOUFV1JSUlJSUlJSUlJS0jVEecGVlJSUlJSUlJSUlJR0DVFecCUlJSUlJSUlJSUlJV1DlBdcSUlJSUlJSUlJSUlJ1xDlBVfSbk2llHNLKTtKKdvo3zFXk9djSylf2GD5HltKGY7lurKU8s1Syq/N6HNwKeX1pZTvj/udPf5+xPj8uaWUi0spB1Cf3y2lfJa+N6WUb5VSenTsFaWUd2+kfklJSUlJuweVUn67lPKVMW5cVEr5RCnlVzaA77tLKa/YIBln8hrj11VjPS4opby2lLIwo09V91LKS8Y8H0rt++Njx5FcTSnlttTmhFJK/hht0k+F8oIraU+gBzRNcyD9u/BnIUQppV859aWmaQ4EcCiAtwD4h1LKoRUemwB8GsDNANwHwMEA7gDgMgC3paYLAH5/hkjHAHj43AokJSUlJe2RVEr5QwCvB/BKAEcBuD5avHngz1KuXaBbjHHzLgAeBuDxtYZz6v5jAC+dceH2YwAbcmGZlLReyguupD2OSimHlVI+Vkq5pJRy+fjz9ej8Y0sp3yul/KSUck4p5RGllJsAeCuAO4wrZFvHbTeXUl4zvtv0o1LKW0spW8bn7lpK+UEp5bmllB8CeFeXXE3TjAD8LYADAJxYafZotGDxG03T/E/TNKOmaS5umublTdN8nNr9OYBn1y7cxvRnaAGmdiGYlJSUlLSHUynlEAAvA/DUpmk+1DTNVU3TrDRN89GmaZ4zbrN5vFPiwvG/15dSNo/PBZY9a7x74qJSyuPG554I4BEA/miMjR8dHz+mlPLBMc6eU0p5xvj44WNeDxh/P7CUclYp5dE1Xl3UNM1ZAP4DwC9dXd3H9C8AlgE8smO4vwHwi6WUu8ySKylpoykvuJL2ROqhvfi5AdqLlx0A3gwA4214bwRw36ZpDgJwRwDfaJrmdABPwvhuVNM0cSHzKgA/jzbZnwDgugBeRGMdDeDw8VhP7BJqXFl7HIAVAOdVmt0DwL80TbNtho5fAfBZAM/uaPMhAFcCeOwMXklJSUlJey7dAcB+AD7c0eYFAG6PFstugXbHxEl0/mgAh6DFuN8B8JellMOapnk7gL8D8GdjbHzAeKv6RwF8c9z+VwE8s5Ry76Zpfoz2btQ7SinXBvA6tBj7HsdrlmKllBsD+D8AztoF3QGgAfBCAC8upSxW2mxHe5fsT2bJlZS00ZQXXEl7Ap1SStk6/ndK0zSXNU3zwaZptjdN8xO0yZMrViMANy+lbGma5qKmab7jmJZSCtqLqD9omubHY16vxPQ2vRGAFzdNs9Q0zY6KfLcf3zHbCeA1AB7ZNM3FlbbXAnDRnHq/CMDTSylHVs4HwLxwvFUxKSkpKWnvo2sBuLRpmkFHm0cAeNl4x8QlAF4K4FF0fmV8fmW8m2IbgBtVeN0GwJFN07ysaZrlpmm+B+AdGGNj0zSfAvB+tNvj7wfg966GTl8rpVwF4HS0xcW3VNrNozvGcv0zgEsA/G5Hs7cBuH4p5b7rEzcpadcoL7iS9gR6UNM0h47/PaiUsn8p5W2llPNKKVcC+DyAQ0spC03TXIV2P/iTAFxUSjl1XEFzdCSA/QF8NS7o0G5L4AucS5qm2TlDvtPGd8wOA/DPaKt1KKVcv9DLPsZtLwNwnXmUbprm2wA+BuB5HW0+DuAHuHqAl5SUlJS0+9NlAI6YsX38GEzvrDhvfGzCQy5atgM4sMLrBgCOoULnVgDPR/v8VNDbAdwcwLubprlsTj2Yfnk8/sMA3A7tVnyMX4YRuPkIzKc700lo7/bt5042TbME4OXjf0lJPzXKC66kPZGehbYyd7umaQ4GcOfx8QIATdN8smmae6K9sDkDbWUOaO8IMV2KdjvizeiC7pDxg7yo9KnSeJvgkwE8qpRyy6Zpvs8v+xg3+1cA9y70BsIZ9GIAT0C7raNGL0ALhvvPK2tSUlJS0h5DXwKwBOBBHW0uRHuhFHT98bF5SHHufADnEC4e2jTNQU3T3A+YbJ9/O4D3AHhKKeWEDl71QVv6J7T6vWh87L6Em3+H+XRnnv8P7fbEp3Q0exfal1w9eF5Zk5J2lfKCK2lPpIPQXihtLaUcjvaiBABQSjmqlPLA8QXNEtptE6Px6R8BuF5svxu/5OIdAF433ouOUsp1Syn3vrqCjfe3n4zp58CY/hYtmH2wlHLjUkqvlHKtUsrzSyn3M/zOAvCPAJ7RMeZnAXwbwGOurtxJSUlJSbsnNU1zBVpM+ctSSuzyWCyl3LeU8mfjZn8P4KRSypGl/YmRFwF475xD/AjA8fT9PwH8ZPzCqC2llIVSys1LKbcZn38+2gurx6N9wdN7yurbAZXXPPQqAE8opRytJ+bUXekFAP6oNtj4Tt+LATx3nXImJV1tyguupD2RXg9gC9o7VKeh3QYY1APwh2grez9G+2zXk8fnPgPgOwB+WEq5dHzsuWirYaeNtyf+K+r72tcj3/1KKb+oJ8bbGe6B9s7b/0P70ov/BHAEgC9X+L0M4+0WHXQS2pd7JCUlJSXtZdQ0zV+gxbaT0D6ndD6ApwE4ZdzkFWhftvTfAL4F4GuY/xXofw3gpvSc9BDAr6F9Acc5aLH2ZACHlFJuNZbj0eN2r0Z78fU8x2tO3b6F9tGA51TOz9Jd2/8HWlztor/H/M9TJyXtMpWmyd98S0pKSkpKSkpKSkpKuiYo73AlJSUlJSUlJSUlJSVdQ5QXXElJSUlJSUlJSUlJSdcQ5QVXUlJSUlJSUlJSUlLSNUR5wZWUlJSUlJSUlJSUlHQN0bw/JFelUkr1rRulFACAezGHO1dKmXzv6uvaz3su+CrpuHGM5ah9dny79JglQ1ebq9N2PeT4sr57G/E87c16bjTtqq1m9dfztThbz3hBmhscz664nhXrtXFr1JUPrk4erOUm5TEP70q7S5umObLaIWmKEiMTI/dkSoy8epQYObutjlujPQ0jm6axAs91wVVKuX3TNKfVzi8uLs4lmJ
7r9XoopWA0GmE0GqHX660xymg0QtM06Pf7aJqmEzy6xnLgsh5nHY1GUzJzv9FohIWFhcn34NXr9SbHSikYDoeTvgsLCxiNRiilTOmpCVzl5jbcX2UPniGHs1uv17PjOvvo2ErcL2wUtmHeLMvCwsLEXrUgDT5hSwAYDodr9IrPs3SelTiYX/DSftFOZXbyq86qWxc/5cu2WlhYmJpjnUPl78YN2WKO4pzakvvyPCjP8G/2+5rONVJ/Zx/n/sPhcCrm2IbA6vzX8oLz8+jjYpj71xYgvV5v4pu1BK9y8hjqe7W4V3tx365FtOa4mm3Zf9SHmqbBysrKeWsG2IdpFj4CiZGJkYmRNXslRnqda5QYuXtj5MrKyhreEz2qZ4hmgYkKzQKxQ6oR43wIHm3V2cNpOJHXAledoZZoOeE5Q8bYLpkxhfxN02A4HHaOy0klnE7t5mwVckQCiX8KbMFH7TocDtc4JwMYj6mk52fNay2hqY7BQ2VQfm4BEgsLDVAAExvp8fje9U9lULuzPNqvazEVCyXnvwqINbn6/dXaSMzpYDBYM9duUcS2dP6oix5NJiwXz5ee41h11GVrXVTU4lhld+cATGJR54Ptz0lVj3GSZV+Nc7PmOWR0+uoxziG8OIi2YVft53ywBvYhW8xhAEm0Z/91ccrfk6ZpPfgIJEYmRiZGKiVGrrW/2joxcs/HyF3eUhgDaEJmAYPYAbocwR1XZ4vql1bAoi8rzQbjyoSbZB6PEz47eQRzyBH8uqoJLCdXCRzVErUmXq2yuMQbejl7qs7OoVhWll2Ph7NHgmNdw1Yqu46twMVyMwjz/EffWhVSEywvFDRwdI5i/hVAVDaWS+dG/UDJJcyoTLFeYVeWXeeMv9f0Yt/nWNQqa80/HHiyH3D1qmtuGUCdTdiOKqvqqfLWfIfJ5R2OLxdLbpyFhYUJcHEeUoDmJN7v9y0wx9yGL/E8MR/1J45Ll7Nc/LPOtdhzn7vAJKlOiZGJkYmRiZGJkXs/RnbRhr00Ixxy1i24IKecKqAGV+fXJKu3d52MLIde4atRtV98jslW+aJKxrJyfz6nPPkcJ9rRaGSrb6w3kwYnJwl1XieL9uXKgrNHjMdtef65ounswrKpHpoQWZauZKfnawnX6R1BORgMpvjV2vPY0UZ9IKqNGrj6OfrwooT1Z9Bm3XgOWAa1dZx3lSBN2rqwUr61OeA+tXmPPjzH7rzKqDKFf2lcsU1YX7XFrLnkz+qfmuP0bgDbW9vF2BrnPB7bXHMBgKlqMI/pcqyzcRdIufju8v2k+SgxMjEyMTIxkvskRu5bGLkRL82wxg8ha8Gqwa4VKxcMrKAGZExOnI9zWtXRCg3zdzrwd6evO68gqm3YFi6A2elcO7avVtFYDq4iAKv7wVVflW9hYQGDwWCqAuV0rlVA9ZhWonQe2WYqH8+V2tUlHNUnbFCzM48d9uHKXbSr+TPziaqbAgf7Nh9n2XUu2Z+jQlRbbOn8aWKIPmy/Ll2c/6quzJPb1yr07IcK+I6vjslxrH7jqvaOt8sZQZpUnU+xfi7pB+/wATf/bB+2oeYn9cHgXevHOrDNXH5QH+DF8Sw7Jq2fEiMTI/V4YmRipM5hYuR0fx53b8HIDdlSCKy9ynMTz+c1MPSzU8ABUfxlA/DteWcAF0R8XCtpTm6WSZ3dgY+7+mfAYP3Z4dz+0TgeFS2tAqg95tU/xmE5+PZ3jbcDOj4XcnF/tqnycPbTc7WFAM+Znp8VDNG2K/k4wGbAY2Bh3+G/umXHJbdoXwML1V8Tg6u6uMTCD6Zz7Gqfeas4qlv4qcaIm5t5PmvC08WIJuuYC7Uh2y2IF28cl87nedERlTQH8pqzuD3PP+vDMT0L3FV/p5fbulPLjbwgd7mpa+6TuikxMjHS8U+MTIxMjNx7MLKLdvmCS52r63t8ZuW4MqHOMa8SNbm6EpSrPPF5lte10ytjN8nclvlq4KusfDXNAOcARe1bk1tl5CRfk5vbuvYuwNyYLCcnpdr2lminzs/9aiDOutfmVxOVVjlrFV9HzIP9iuXjeWf9dFuF8yMFJPVNta+eY50i3tin+LzqFLxURydPkFac1Bc1DpmXjqf+UkvUKrPy5b+8OJuVhB2xrzB4zVrARV+NIwdcNdlnyca2YR05/9QWgsFft/Q4WZLWR4mRiZHcNzEyMVJtnRi52ndvxsgNu8PFA+sVdJAaQwNAnaPWr8aLjztQm8VHeTjDA2tfFVuThcfViQ4+OoYLWMeXeTtn5sBRu6iutaCeJ9mwHEpsL5ZJk7Mmw2jPW2Y0+GclldptYhecLMto1D7Y7XxYx6/x5MB0c8B6cz8FL8e3ZuOYr1r8uW0M8bm2OHELHT7ngNKNx3Ne8xX1YfVLtaPKqeeULwOoVrNqyVN5a07h51t0y4bK5GJJ31alCxiVoZbMa34aMjoZtD2PX7PtPICWNJsSIxMjdczESKwZR+2SGJkYuSdgZBdt+AVXzfE0YQbFg5fsMOqkzlhB6hQ8bpd8akB1LL6lqeOWMl0F4XPqcF124Ilz42tf/quOobfeNdm6MeO7S8BcoegKrnm2JnBFq7aVpStZ8nGXmBRI1a4hV42/9tVkx4mTt6nEd+WhtmA5nI+7W/xdcjpd47MmSubvfKo2B5rMnd/WEk0tcdXiRcdmG3Ff9d2aLPw5tid0AaP2qX3W7+pjqg9/d3PMMauVXNaZqQbGaiMnc5fvzNI7aeMoMTIxUtslRiZGujYqS2Lkno2RG/KWwlowOaG0Db8NpuaA7NgcvNpOk6STR4NFJ73LaYP0Sp3Hr5FLbPr2GJVJvztZZjkyJxD+p7pH5UcBoivIagsH57DBX7eO1OzDidrp5249Az4RKbFdGRh0Lt3cqkxuywProDbXZNFl2xhLgV0rQdxe9VT5+bj+rVWOdT70n/Mpp4+OpQs51pHP6RuUHAiqTaO/+82TWcChcrlFATD9FiQXp7Vxw5YBMOq/2k5lmlePLjnU5jGGazcv36Q6JUYmRiZGJkYmRu7bGLlhr4XnAVVIve2tgKDKaNDprV43uTrxXUGh7eZdSHAwqwM7kFI9XSINni7YHH/WwwEP89C5cCAVCVXt6cZUqunt2qm+et7poQlcwYT5xW1hPe4WGRpMKocmDTeOtlO/47FqfsA21ASqSYZ5O7mVBye6GuAzqYxdoBeyMRDrD19qn3l8Q+2nv9nDnxlgnX1UFpc/FDAciKg9nE85G+o/tbuLJ13EuVzq7NflezymVpo1j7r8oTRvrkyqU2JkYqRrp/rq+cTIxEi1X2LknoWRG3aHq2uvak1JYPXNPEHOuWrB5wKZ+ToH1iCpJUkeP4LHAZVLKrUJBqZfO8uTzKQVoi5yVSoOcE7ICtZhn9iP7eaJdeE2NR1qCwLn0GwvpwP7hgbXwsLC1CtqtcrByVSrLEp8jPuwP7vKGMvD2xSUpyYstSVTbeGgcVIDQwcCXXHi7KDJmiuiLg6Ct3ueQIGf5XBJnHlEv9qYzMvpyxVC9cuwP+8VVx8O4n3/TlcGfvVtbqc2YL1rIO7mzIFArfLKv
JyNXa5i+3eBfdL8lBiZGJkYmRiZGLn3Y2QXbcjvcPFnl+R1b6YzkirkHNE5DpMm99o+1xoQOd56XMFqlp6qE8vIe505uWjSYN2jLe+dr4GEk5Ftr3qWUtY8L6A614i3HKjMbI/aVgznH3yedW2aZlItUjld4nDJuWbf4M082NYst9pUk7HbSsEy6aKE22pCDNL98c4/nQ1mJSJto77MY6sN4rW5aluNcX31rNNB50LldcfdvKgd+Xc/9FfvGdScz3LC51gPHRSEdGFdyyHOL1j+mAO2scocsrh55HFj7mqLHc1NStF/PeCS1FJiZGIk2yMxMjFS5zQxcu/ByC7akDtcPBEqJDBdTZq3MuWSvH7XiVVDOQdnObitGlC3EHRVW7QPO5D+ZedUnVQf1Zv/1fryeZcM4jwHgAMVR5pg3dzwP7ax2rPLX5QXyx/ECcGBdpfsMTZXh3is0Wg09epWBSP2q1m+rADjKkRaXXSJKvrUwJHnpJZw+v3u+orzLf4Xeqvszh+0Hds42vf7/TXzrG+/0vO1ZOfmoUsfbeeAJM7xnHEOU8DQ+FC5XO5iO0YbfmZHfxDV+ZOLfZdDeCydW8ejK88krY8SI6f7JEYmRqpOiZGJkXs7Rm7I73CNRqOpvaTq7EHhUBHEfHXKV67a1ymsMnA7dXxOOno++rmr4VoQ8ZWwggTrUQuucAT+ZfRoo1sSlA/r62zNc8GO1uUEIXdta4JWF2ty8dW9Vky5rXvrlgv6GjiXUtY8TN3v99fYWCt+tQDjAGQ7OF1rc6G81Xb8Y4sqJ9tLkwXz4oWNjhP6uaQYtg6AZH04Jvh4zReYn7YFVn8k0v2oJceZyhjHNCaiX1QsA4CUQh61I1eVNb44rtmm0cdV/weDAfr9/lQOY9006fOCgMdTHfjNZJxbeM7cIoMrcqx7rdJWy826yAs+KkfS+ikxMjEyKDFy7fHEyMTIfQUjN+S18OyYoUTcntRkppMQx2rG0oTDE83Br06hARDHSlndEqAJQBOaVk9Y3lIK+v1+tcrDMnDCcYHEfV0yUyDVRNQ0q7dTWT6X/LsSjquwhdyqnyYjZyNuF+Ny2whs9RM3d6yXa9u1IFFwinnl8dUH2W6xeKhV3oDp33LQYFbf1rnhvzFOLAp47tQuvDhTG9Rsy34SxHPikjkf4wTDY2qCVsBRu6mNo52+ojZ4Apj8ACxTDbCUn8rQ9eplnRsex4GF9ldeLhaCP+cz/s7gr7K7+HA+r21rC9eohEcbjlWeHzd/SfNRYiSmdGQeiZGJkYmRq3ZQSozcezByQ57hYuO7yQXW7tnUhMbVv/jMgc5OrEkSmK68OJBQXnyO5eXP3F4dOQKeDR3Br2NEe+att1xZTk2aSgwI2j8qC2oztb/qzUlMk5f242SkfGLutJqmc63A5IJDAY159Xq9CZiz3O72sb41yS1a3Fgxp+zDOp9qBzdX3EYr1zVySYjBvtfrTW7dDwaDNYsvtWmAiKvq6EIj/L0rYXUl0Zo99JjmCK6s1XiyT9ZAgI/xcwxMvF/dAZ4D7ziuvtm1iNE+ysv5dSwkuuLMAUTNB2ugznMd3zUvB2lFM2l+SoxMjGQ+iZFr54rbJEau5ZcYuedgZG0MYAOf4QKmE6YKFdWSEEyTSfzVRB4Kj0bTe4b5dnmQcxx2GPer1RFg2kerL7zgCHkiGfFChCeZE4dLwg5wHUA42bgdX33rQ5dsA50bFwy1fqwzn9d/w+Fw8s8lNQVgrTxpEo3g4sobAyDbiBcTusBx4B633mNct3+bdXJ2DTl0zCB+FoIXCax3VJK4v/MBNzbfTg+9YixdWNWSAcsTNgmfCt113tmODObaxvmIIxdj3F79iGNGgc3FiOvv5OkCqS79hsPhmudvOL+ErKqz2rIGuCqPk5PHcL7keLs86qhmr6T5KDEyMTIxMjEyMXLvx8jONrsKoL1er4kg5CThJoAnUJXlpKGVDT7Px4K0rY7NY0SgaDWL29Xkdp9Z31pSVp69Xm9NcqrZLZIr/61tX2iatbfDGUzdHMV8cMUqbM2JnbeGuIQUY3BSCQB3wMX24sqSCxSWKyjab9q0aWou2YYqu9sjrlsSXDy4IJyn0q8Jj5OkW0yonvE5Enu/359s9WFbRhuuwgS5tyNx4mZ/cdsKQn71IY2BGIMXbDW7OqBwbbid2sqRJnudI553lY951/KU8nU+zX85sbvYUF+Lyrsbw+nJfGtjukWIs7+TyeVCABgMBl9tmubWawRLspQYmRjJtkiMXNsvMTIxcm/ByMFggKZp7CRs+AWXnJsS2vSd9Auj6C175tNFajQ+5pwjqGZAvQrm4OP2vP9VnUbH1jG0D7Ca+NRRFRS4j3Ocmr2co7LtXYIN0ipJF8Az8GmQR2IM+7EO0SaO88PNwZc/R5tIkAxQ3IZJgWc0Gk21Z73UP9h2Lgm7hFxbZOlc1AA0gELHU904sbgEzcRzqbZSQHaJLebVAbnqFd/jnCZM1lkrjqzbLBBw9uE3GNV0Uv90OYL5a7xp5dLFlwKjyq4go/HA8mlu4fmona+N6RYJLm9xXC0vL+cF1zooMTIx0umVGJkYqXokRu75GDkuHtgLrg15aQYDgAaTU4aEso7a7/exsrIyxUcrVy5J6LhsTJfU4zgnPHbISGr8A4KR6CM4OBA5IdecgW2h8gHTr3JVudnRmIdWbjThcxBzGxcADFIuEWmi4TnXYNOxou1oNFpTCdREoltMOMFrold5a0nU2ZsTgtpB/U4BQBMqP8PAPhLt2E5sF124sP3iXCxauPKrvqf685aV2tyw7d1ccV/2j5p/sk6ukqcyBOn4muDYPhqjPDYvziI2uK/TgWVUQHc5hOXQZ2lqWx64H+ebOM5VVvUz5w+sk+Y1bqsy65yx/bmNu7ugeiXNT4mRiZFsu8TIxMjEyGm+ewtGduXdDbngUidxiqkDR5BpIHFwuepfzZG4kgOsvmHFjRt/lXd81mqMm6zRaGRfs6pAwsdnkauEOB7s5L1eb41Dq8MzuUUTO7cDwBjHgaQLtvjMwbOysjIls7O7k6vWxs2H6qJtHBCrHAFobMuoNCqYxXgMZgEoMS+8vYHH4e0nXC3lsVmfSDThI/xdebh41LeF8RgagzW715Jh2EyrbhGDaoeQnwEyeGkyD93UxurnLJ8eVzvWFnla0XOxEvaq5RXHl8+rDdmOCsIKKgo0XeOoPVxc1BYCbAv2DQfASfNRYmRipMqYGJkYmRi5SnsLRnblsQ254IrBNFEHuYnhN9twnxBajc1tXYLXANCEXgM4NbhrrzqpLgyMOqY6kfLnKiHLxAnSJRdu45xG3xjVBSLBg+dAx1InrgWyG4vfEDQr4NWHdK5CBk7oEYC130HgLS5ObpZJg4sTmfLR/iEL+xH3UTu7uVS/ZrtwsnGLovBDByj6FioGTR6Hq2hq99Cb+2nb6M8Jkbd71AApbB2kCdC9/tdVmV0OYvtpjGui1rlVu/CcOb9S/+L5UPnd4kvn1uUdBV+d85oPsB/yyxPUJ9iW6t81gE2aTYmRiZG1sRIjEyMTI/d+
w2uru7s2BFspROpy+6IDIH/jNhGNqmi5vNRtm3bMvpdDK2rlgsajAY2JwwRrDIzWZTo9HIgn4YhmYvsFeACGwQ7Yc9k7hYLCzBxhdhwBgbdqvHB9GAw8gCWpkwY/ZBrcVsNtNoNDIfgNHKZrMXdWWwlyRPxAESTe7RB0FAcbfbWXJDkgKw7nY7Y3PxVz6DDxITCKr4J9eGZUbO5OMIKwYkg7D7HF7//ly6gM8TI6iZYUzpHupZP74AfM/icX/+3p/HU4DSr7R4RhDQTiSF/7kjwcj/+hjJfFD3ywoGL7C8YBIneYGSZPGYOO+TLi/39HGEuFQuly8wElwtlUq2EkIxPnOWTqcNP5h3ksvj8WjPlM2eu8kSd0ulkknIptOpGo2GrUTwIk+Cy7Pz+Xw+r0ajYbaAhI3YzQsrLzfPMfJwOBhG0n6cBJzxwIbCMLSXPe6ZXICxpLU/ZAbY/i6MBKPIJ3hGSAPm6ng8Wq7CXLMBMjYFRm42GxsDXjJ52ab2jJc5ru9XZ+j4mWDkrwMj33X84hcu3vji+Kn7BwHTO5JnIJgcJgD5AsFwMplcGKnf64DAQ9BmEkjuoihSs9k0RgU2jDdbmD4fpLl/Jt6zLn7pWDo3fTgczm1laUzBEiuDjpHTMY4A5IM0DpBKpS6Kf2lggWFwcH8Ec5gtzyzsdjtNJhMb83q9blp5goOfM4wFQ/VLtz6QUkPAcvF0Or3Q/2OEfI7rw6Kk0+eCVF4kPLOKFnmxWFgxMvUBAKeXruz3ey0WCxWLRdPm83KWyZw3YMQeeDGqVCqqVqtWSD0cDrXb7UwaEMexdfnxLx4AFWDiX0D8c/P74/Goq6sr3d6ehYjlclmvX79WrVYzVtuzzQQjX4wJO/zceU+nk+r1uj3Pfr+3YmmCxXw+t5fHyWRiYMPLKMGZvWM6nY6CINBgMDAbYux5fjqfxXFsbBq2h9zAv2zX63Ubl1qtZqCJxAJmD1vCtqXzC1sul7NCXewU/8SemTPP6BNrGDfPTJHMwVYDdDwzCYYPnn6lgXnHFmFGpSc2Gnun0Fh6ajPrQZB7IUnAJokhJFuMC4wyCQYMPPaD5v95IswYPdfmw17jT9yjvz8Or8knifhLgJIcf34kGJlgZIKRCUYmGPlxYOS7jl/cNEOSsSi8efr/w3DwJumXKjGw58GMzzCoyBcYKCYYI8IBYPBhX9BKExB4o8ZIMBqW4GENYAa9vpXBZeKWy6Wur68vzgFgsQrCxOHsdAYEcHibhxVjAzx+z5JotVpVsVg0qQLLtH7iuUdWNTyrAKPBs2CYrN7s93tjBNBNE9zY+Z153O/3pt+OoshaqhNQOL/XnTNHzAuaXr97OZpj6RyEuV/uMZM5t/RlCZzzSTJ5DAEMO2Elzp+DwEhCQhIAw8OyvCRjOpGkMLdo3mFIJ5OJVquV/v3f/13L5dICCnUUQRBYstTtdm2sCEQwdwRxxotn9K3p9/u9Go2G6vW6Fc5z71F0LiTHBoIgULlcNlYSdi6bzarT6ejVq1dqtVq2LI+PsGpJ8oNd4QP8PAgCA+R6va5ut6swDK343W9+SrBj6Z/z4O88O89IApHL5VSv19Xv902Hz3zB6Pnxgq0iACNhQv9fKpVsTPg77yuMIzbimTsAli/m1SemXlLgVy687MDfL8kDDQS83EZ6kqLhuyQPkgxYvQSMcfPxAwAi3gBezxN+bIYDAOIeGIPkeP8jwcgEIxOMTDAywchfP0a+6/jFK1zesFh69/pMBtcPLI7Ig/IWCwPC0iVSJ/9mikaTwaDjChPNciETzkRSKDmdThUEwcVbMvdKYOGtXjoHFLSiURSZNI0OSSyXw0Z5potz8n+ChnQuYJzP51aAikN5gMCIAVsAsFarmaGwDF4sFk1P7usDGG//Fs8Gt8yZZzrQ+cLQISVIpZ46ZsGkMkeSNJvNFMexOp2OMYnoh2lviqEimyCAEdCWy6U5DS3oYa0IPLClBGdYOuYCWQIAiq4X5hC9L5I6WFLGkHkm0SGYYwveSSnSzGazevHihe7u7rTZbFStVo1FZMw4N8XAkqwAF3YZ8OP8jDN2PpvNzEb97wgk/J7gDrsK20fgQcf93Xffaf52Hxr8EP9Cq01y4Bnz1WplyQefqVQqZi/NZlOVSsVa4sOMYpckDx5QfMBDHoCUhMSiVquZNt6z7dI5cHt2ClbMB+7j8dxmu9VqGQMMAAO0bORKvMFn2ZQbn/KMOPeQTqdVqUvlvewAACAASURBVFSsDgB2kXjjE21kFSTVXg7h/YQg7gGFMQP0SE7iOL6QbpDk+3N4SYR/eeIZfJLO54lxMKnJ8X5HgpEJRkoJRiYYmWDkx4CR7zo+SA0XRkLg5o2YQOXfCLlpX//jAzHLkgR0vyzKwCO/wJgJbL7zDMyBJNvLhEEicfBvylyDJXr05ofDwWQgBD+ebTAYGKvAz6MoMiaKt990+rz5GwbFMud+v7/QZ9NtiAmUZF1cMEKYPJa4Ac3dbqd2u20BmOujyZXOgQ2AxfGpJWJvK8YbPT/gEcexut2uMSsAHYENSctkMlEmkzHAYj5LpZLm87kFJOarVqtZwGKMAQXsCsPebDaq1+vGxsHA+Pag1J7lcjkrRi6Xy8bO4kwkH2h5keysVisLnHSskWSaZ2yOgEsBahA8Lce3Wi3byR0751yADHYBIHn2BZuTnpb/vQwHRg1n5/5rtZqxaYAugdRviDwej5VKpazou16vazQamQyCAIw/SLrYmBEWm2cHjBeLhbHa+Dl2DEBzX5wTqQsyIvwDOQ6M7XQ6vYgl2CRMM77KuUkGKTznb9h3hFjE2JCw4m/YYyaT0Wq1svsjgBOksQXslaCL3RP8uZ7XyMNO+/H20isv38DusBWeMYoimzMvy+DfnpEjhhJXib983oMWPycZ9WCTHO9/JBiZYGSCkQlGJhj568fIdx0fRFLo35J5u+XGCQQ4wM8tJ3rZgHS55H86nczInv8NMgz+7Q2oUCjYwDDpSBG4DpOKY/t7TqVSpmkm4PguO2wmh7yAyTscDhfL8Eway8SVSsWWu3Ec2L/j8dx+k8nFmAHnXC5nrWwxfknmoCx78zvuAVaG+4QBYVkZACRgwPg0Gg0dj0eNx2NjVx4fH20eT6eTjT8gB0MFGMOKEPTT6bSq1aoFTw8cOCl6fZ9QrNdrzWYzez7GerFYXLCIOBOghe2wDwX3jGOWy2UD2VQqZawgLVCz2ax9jyPSvYr7i6JI0+nU2KFUKmUgQkLFPMPm8fw4u/+ecctkMgaAz/fCIeHgK5/P6+rq6oIF9iz44XDQarWysQNokVL4JIQEgnHj2n7JH1Y6kznvGTIajTSbzYypZZNkui5hPyRLPD9jSUctJA2efcIekacQS7wUATaVxEySNVshSDN/BPBs9txdKpVKWeGyT5AABeIbiSbSLECe2MD5mSfuEekM8YnYw/jg24yvZ4l93PM2yM+4N/8z/zkv4WLOfLz05+f6+Ab261+2fNxJjr/+SDAywcgEIxOMTDDy14+R73rx+iArXARNHoDJgOHBAP0NYwB0x
+EmYUSeTyx/w8+YDA9eBPbT6azp5vzIHDBaGD6/1IvsAHbC/75eryuOL4viuD/fhhJQ5F54Q4eNgKnj85499IwYIMrbv2dhjsfjhTyAscHwPTvnDQX2C6bCX2+9XqvRaBhLSSEtAAc439/f294qPFs+f94JHKeDHfFMGewiz+ABnODnwYQuMwTN0+mk+Xx+3k+sXFa9Xtd8PjdJB78HoNEkw5ZwH+v1WtvtVs1m056PQA4IAIieZaMzlWdJYHmeMyeZTMYYJeyBz8K8UDAKeyM96e0pwmUu8AdYSWQGdAokWZnP59aVERYLKcl6vbaWwIfDQVdXV1Y/4TtG+XHmnryWnODpgxL2GwRnSQes7XK5VLVaVTqdtn11mHPYrlQqdRFkkSOQ8JBUYEskVdiGZ5tIpqQnlvru7u6iiJUY4u0KnwKkAREA2Eu8fIJAxzR+79lY7Jv6Fq7t4xa2gxQLP+X3XgrCffpEmPjK8/A5QJJx8Ywj98u9MO4+VsAaejkVKyYwmMnx/keCkQlGJhiZYGSCkb9+jORzP3d8kC6FBDVYIyaEgOxvgOAnPS0F81A+oDDJ/m95KH7mgzEOTfFvsVjUdru1tpeAFpMHmHk2h7dwjtVqZUwTwdizZsg6uCfAhsAFI8ObuWcCYCYITDyD36QPI+f+uDabK/o3dYzg6urKnpvuOX6pHamGJAvymcy5cxFsFOcEdLlfltlxPGQcsG/SU4DxYLpcLpXP59Xr9S46LeFwBBEChj9XEJw3H5xOp1Y07YEKFo+/gcmElQEkCMTYqqQLZgxARuNMq2WWnKWnBAJWCDv3S+LMg9df++V1pCHMn08sAGYPdBS9TiYTky4wLiTABNhUKqVer6dWq2WbJeInzDfP7tlvmGUCvfdnOj0xFnxJT2yOD54U5cMa8nvugzoNEj+fYPp5L5fLxjZyj+jVPdOZSqUs6UmlnjaJLZVKmkwm5huSzAcI/iQ6/I7xYp44CPJhGP7ZRrPYhAcL7IDfU8T7/DyetfNsuGeXOa8HB+93+JsHC796QozyMYD788k45/DxVJLFZ35OgpQcf/2RYGSCkQlGJhiZYOTHgZHveuH6xZJC3tq5iL8hgo53RowgDEMzWhyPQYZ184bv9avPjY9zAy7NZlNBEFhXE7oV+X2nWOaFYYA1IhCxbOwHH52qX+5Fz8xnvKFxUFDrO6fgCFEUWTEv7KJfMvXdZGD0GG+ACudGAuE1tgA2zF61WjX5AsvTFDtS+MheGN5YcRrGkiDGfDKHjNfxeDSZCQxVsVhUvV63651OJ9M449QwJOl02paZYYXo2HQ4HKz9KUG1Wq1aMlitVu25sBmCF5IRH5ABeezWF5RTFHw4HGyZXZLW67U5Oywm5yUJGAwG9jO/ZwpOTDCDwYrj2Ng2WGdAjv1RptOpadfRwmMTuVzONPSZzHmvj3q9bgXByHVoqdxsNtXtdg1opSf5A4XU+B3tabFBAh7PAlM1Ho8lyTpEAZwcJGT4CcBNET3A32q1TB4hPbFf+BbnwHeLxaLtJ4LUgPnHfhlPzgf4wLz5oIw9B8G57oDaAWKN15djF/gr8hDG1QOmfw5sg8QiDEOzT2yR+yEZxeY84BH38HtsFlv2gAGweIAillJ7Ij215uVzAJ1nQ5PjrzsSjEwwMsHIBCMTjPw4MBL//rnjg9Rw+eU+BsrrIglqAAuDyGeeMwosDxN8paeuPRiAHxwG3Ac8DKLdbtvu3H4gCICACfrZzWZjy7Ee/ObzuV3DM45eZsOz4BDs5QBrwXUJ8Mfj0TaUlWTX9ku/jCHAyZI7Rkx3GxjLyWSix8dHrddrc3yMIp/PW0DGGeI4NodmPCXZPgqLxcI26xsOh9biFekEemE/Xz5I39zcaD6fq9/vW1FuuVy2TQX5W5yPwEpA5NkovGZHd9gmOmvB1GFHtAjGEQEvHJE5hHElkK9WK2N//BwyZz5wwJB4xoUlc5gw5m46nVpgee432FoURRfJAwkStkoXLQI37FcYPi27L5dLLZdLkyfEcaxGo3Fh84wHHYQAKuwYIAeUkWeQHMCGw8B6lpE9cBgX2Gh8GF/1gY+AdzgcLljvRqNh9gU76IvUJRlzCGBh0/hQLnfuysZnqIfAt57HLb7nXvEr4hUSFZ8YRFFkDDT2x7MjN/G1BPzeB2cYOR8jAUq+eHYPjEEQGIAdj0fzWz8HxA3GjfthvLBR/MKPoz+Ht9nkeL8jwcgEIxOMTDAywchfP0a+64Xrg3UphIki6Pk3TwaPN8UgCOxt8fmSHZOE0/KWTZEghgtwcDA5LIFLMoBgYN68eaNut6sgCIwdQTctyUCFCV0ulyoUChaY+R0TQ/tUjM3LJtgwbz6f22Z2nIMiSb9EPx6P7X48+8d5+TlabsbbyzwwxtFopO12q2q1agboQRqN+mazsXNQ0EsXKALr6XTuwrNYLDQejy3oMsa73e5CCgAY+BarzWbTmLBMJqPpdKrT6dzKlkASRZExcpxzvV6bphpggD0FSAAtz0otFgtrk4tTY19o31mupr4AmUQcP7UVXSwWqtVqpt9Gpx5FkfL5vLGlPgB5rX273baAyTJ/Pp+3xISkh4CADZIs1Ot1Y+rW67WxSGx0mslkdHt7a/eLvdHpqlgsajQaqV6vm83wfPl83rTp3o8kmcaaxIVkEN2zJKt1INjA5sIUSzLAg50KgsC6XJHQeJ8YDocqFAp2/81mU9vt1iQwBFE6hvkgSBE+cYVn9ckXz0kw5d4Jvj6R8geMo28FjIwmm82qUqlosVhYbQZdn2AuYT+xEX5H8AYAAApWEfyKBfdN3OOefGJDcs6zA0aMk48XfsWB//O9f6nCN6UnwEuO9z8SjEwwMsHIBCMTjPz1YySf+bnjF69wEag9m8HPPKvB4PBQXotKsANkpDNAYYQMnGc7eJvm39wLXWDoqIKkAUZxsViYVpN7YoK4X4oqYTHq9brp3llyPx6P5lC0DsVwABdfeBxFkTFFMJosr3JfOCPL2oArbBfLrxTo4jz8ezAYKJ1Oq9frqV6v29I95wQIYVxOp/NGf6VSSa1WyxyFsWfZt1gsajweG9PI0nS/31etVrNAgSECQNxLt9u1cUauIUnT6VS5XE6ffPKJsQm+iw6sBPebzWatzS22RODCjpBQYC+wO2j2sR3pSW9+OBysjgFHzGazGgwGenx81OFwsEABCw2gYWOwoz6B4prc2+Fw0Gw2s2VsQIagRktlgACpCLbii5Cz2axtWEnQKpVK1i3qp59+uggE7XZbqVRKi8XCgm+tVjPfSKfTxnCTMCCxgcUFbBmfKIrUbDYtsfL7mvgiYwDpuXwKMAHomGuYPLpEef8meWAe0PR7CQ8F18fjUZPJRKPRSPP5XIPBwOxisVgYgBMzfID2bBySjXT6XDeCHItGAfl8Xv1+X+12W61Wy/b6mUwm5kM8l/TEopLk+JhG3OQ5Saj9F3aOr3mZBuNJ3QPPge8Tn4lB3BN+wjhLly1yAZt3gUly/PyRYGSCkQlGJhiZYOTHgZHvOj6YpNAvb/PQ0lMLRh7cL8F5
Xaa/eb90iqHz914T7AHhdDrZhNzf39s5N5uNJpOJ4jhWq9Wy5WwcDT02y5iwDYVCQfV6XaVSSePx2DbgA8AymYztQo6DsLGdJGOZaLXKsitjheFst1sdj0f7TCbztFcEQQm25XQ6GYPmg1qz2dR4PLYl1kwmYzKCMDx3/CGwSrJAgdMTNOhcFEWR7VYvnQP8bDZTvV5XJpNRp9NRuVxWq9XSF198YZITGLZisahOp6NGo2HMHewErCDzfnt7q/l8bgYL+wVDEkXndrJoxgHczWZjmvFXr15ZoCbQwNDBynNvu93OWBRvT2iUAeZMJqN2u21sC3aG0+52OwNd2tjSrhV2t1qtmq0jg8HOU6mUlsvlBSB4KQf3g89IT8E0nU5rPB4b04otcF720yFxgGFm3xMYIuQ0MF7sx0LwhnmGlRwOh5rNZhqNRsYUDYdD3d/fm+9htwRFGG58luV6gIY6AoLncrm08VosFsa+B0FgbWY3m40Vh8NY+6AI2JEo1Wo19ft9RdG5Exufg8nCFkhEkMlQS8EXNkSsAHzX67V6vZ6KxaJ6vZ7JVgBLEh+embhIUsQcI/cBvPgMCYq3gzAMzZb9nCH14VmiKLLNPolT3AuMLDHLvwj42E2cYe6S4/2PBCMTjEwwMsHIBCN//Rj5rpeuD9bjlwvzxunfuGGR/Ju7l1ewPM4yZaFQ0Gw2sweEKYEl8Ut8sCKeodrtdprNZjb5sGbS01KmX17k/tPp8w7z6MRbrZYmk4mOx/NmdL4VaLlcVrvd1uPjo52f5/BvuxRSZrNZa7cLw0BAxyhgvVjihYXYbDbG5LDrOQGC5w7D8270OCUOh8HQMtXLK3huv2+HJHW7XWUy50Lq6XRq165Wq/r2228vCjbRiqNVpnj3cDio0+koDEML8rAmsDxRFKnX6ymfz2s8HqtUKlmSAfPg5zWfz1srVb9TOfYBA4sdVqtVAwCckU5F3jHL5bJdk93RJVnHI5gt2gJjp9gmBZgw1QQ0/p5uULCjJF2pVMrYNtgtQAAQw2YBEiQE2AG+Rccm5jCTyejx8VHValVxHGsymZhPetYQezwej7Z/DfOORABJQ7FYVBAEWiwWmkwm+vTTTy0BIUlrt9tmwzBCSH0kmdaaawKw2BuJTxzHBnAAH13CUqmUASm6c7T7MF9cEz05HcAI8kEQ2N+QPMC4Y0uZTEbL5dKkW1wPwCNZuLu70+3trTHmo9HI5DsUdxP7AHySBuyH36/XaxWLRWN9iYs+8fG+SswB7DgviWkcxzamPqkj9gIkvt0wYE+chYFNjl92JBiZYGSCkQlGJhj58WLkB+lSiBHzVumlBzgYb6jcFBIJBhYNOM7kB16SGQ8MDwwWAyDJWCiKJ0+nk+7u7lQsFs2hD4eDOQWMBxppvkqlki3Xs7yfzWb12Wefqd/vm777/v5ew+FQmUzG5A2wjkEQWHEk4FKpVFSv1zUcDm3SYBUI8rCJfpl4sVhoOByaPIOlZJ4nDEML4AQDNuvzbCngy9hhNOPxWG/evDHNdRAEajQaevHihbFZ19fXxgpSnPjixQsdj+c2wujb0SwzFshW/PxIT1rb0+lkLAnsDDvWR1Gkfr+vq6srC6IwiBS5Xl1dWUD47LPPtFgsTKrBs2y3WwM6mChJ5oAwbsfjUfV6XfV6Xf1+35a+qY1gc0uAaLlcGrOH3p3f+9aoOOTpdLLuRoAH4HI6nff3mM1mdl4AA1uWnhi8arWqq6srNRoN9Xo99ft9C2YEnvl8bvrpZrNpiY3fBBSAIyGCgST5wUd5ziAIVK1WTV4Di8uzMM+SjJnmgHn0RcnECAJ+GIZWpzCZTDQcDiXJgt9qtVIcx6bR/+1vf6tOp6Nms3kRcEkiYIzn87kqlYra7baNg3RmKJn75XJp9+slFUijYOBhQdGO12o1zWYzPTw86OHhwVhdEh3kEjBrHD5B9qsS0hNzN5vNLsZ+u91aAsBzsBoBoBNbYZrL5fKFzAUwR/YF6CLDIOaymgLDR2xPjvc7EoxMMDLByAQjE4z8ODDyXccH2/hYkg0IxsEyqfS0LwIaVF9c6IOPNzi/TAfbxUNxXpYYcUiuz98xMQT5drut5XKpUqlkBkGXF36GDnu5XKrX60k6OwYbQ3Y6HY1GI2OiABMCNEGFnd8bjYbq9boZa7FYtGDhJSP8fafT0WAwMJ06zwYTyr3CPPliRwCSJdPT6WmHdPYm8W/wktTpdDSbzTQYDFSpVKwgkWLc4/Gofr9vzMJ6vdZut1O/379gTCmYxCApdN5ut3Y/dPKBraAYEv3/8XjUbDYztgGWkk5Io9HI5BzUBlQqFX3zzTe2VM3+GqPRSOVy2SQrBPf1eq03b95YsoCdTCYTG5t+v6/5fK5vv/1WlUrFClaRF8DwIa1ZLBY6Ho8GehRNE2xYXgfsYZEJkizT+z1HttutbWSJJvrm5saCO6xeHMeqVqtWaAsLTFG0T+RIBNDiB0Gg29tbY3ZhcwnsyBO473Q6bTKU0+mkh4cHffbZZyoWi7q7u1Mul9NoNLKNQulmBpsHY4kN4e+A6+Fw0Hw+V7Va1Xa71ePjo0kiUqmUte/NZrPqdrsGwCSRSGJIEGezmZrNpiUTzJtfYSBppRMVKwvpdNrAFTAhoQ3DUJPJRL1eT/v93mwAm0ilUsYek2z74nFiHswocSqKImNtkZrUajUVi0VjUwFrDsbYJwBINgAqnpMmDCSWPAu2ir1wj14ehn8mx/sfCUYmGJlgZIKRCUb++jHyXccvfuHyA+KX7pkwb8iAzOFwuOgygsaXf8M4wcB4CcJzZoTrMjgMBGwaOs1ms2n3ixPx94fDuaUmrB0tcu/u7nR9fa1Xr17p22+/1Ww2MzZjMpnoxYsX2u/31maUjj04Asa1Wq3UbDZVLpf1zTffKJfLqdfrGVMDQzifz3V9fa0oijQYDIyRgPGioBW2CUNB48rSMIafTqdtXD2bdTwerVsRgfbq6sqMbrfb6euvvzZ2jbf6x8dHC6ydTkebzcaSAlgXb3iMbbPZvJDLsI8CycJisbDgCggDMAQuWDdqBAaDgQWIXC6nzz//3JZ8AVu63rA8Pp/Plclk1Gq1tNvtNJ/P7fNxHKtWq1nRKOMznU7VaDQURZEFQdgQDxRhGGo+nxtAI19AakIwoUMYLI4PpiRDvtgznX7ayd7vRfPw8KD7+3u12+2L9sTSOTkg2MOCIhEpFouaz+c6nU7627/9W/30008Kw9DsL5fLXfgRASaXy5lef7fbaTAYWGI0Ho9Vq9XU7XZ1d3cnSXY+2OjHx0d7PoIrrFg+f27FjF0GQaCHhwe1Wi0ryJeeNqcslUqWOPzzP/+zFbTj3/wfOQBjh/8QvMvlsqbTqd0PMQb2D609zwnI8P1isdDj46MajYYlv7Br2HK1WtVoNDKWLJfLGVuIf2GjBG7YZpi/QqFgtrfZbCxx4++YP2wyk8lYly/GggSF2MxnaSzgZSE+pgK4gCyJV3L89UeCkQlGJhiZYGSCkQlG/uIXLpb9+eLG+d6zotwgRs5ArFYrCwws6RGMYKM
kXSwBEkwBKN7+JRlDsdlsbLkUUAFMCExoUJFJAAoEisfHR9Otc05YRpg5lirpqkJQ63a75jCr1coKDQle3W7XNLij0ciWutH5rtdrGxOClCSTR3gwZrmZ8SsUCtpsNmq32zocDsZmUJjqiwPX67U6nY6urq603+/1xz/+UdVq1RwdMPvkk0+M6ajX62bYmUzGgj4MLnufSDLGD3uoVCpm2NPpVKPRSJ1Ox8ZoNpuZPZVKJXW7Xc1mswtdeKvVMomCJGPLCMw4myRb9vZJTqvVMpkKWmIKfUlCoihSu90+O8rbILTb7bTZbNTtdo1pwc5g3HK5nLGrdPxCc87/uZ9arWY1EH6PFQI/4LpYLKw+guCGDCCTObcRPhzOrX+r1ar2+73u7+8VBIG+/fZbffHFFyYjqtfrNvZ+KR/bk2TJCUnf6XQyhppkYLFYWJJEYKrVahYE2ZsGaQ/zwb1KMsAfjUYXHb9arZaCIFCn01EulzNfIynabDZ6/fq1ms2m6a9JSJA2Ma5xHOuHH34woEyn05pOpyarAbxyuXObXQIvBzpx7MoDFpKGfr+vm5sb8xVsg/bTyFKILdgqSR/1IyR8MLCr1cr08JVKRZPJxP4WFtbHw/1+b8k5sQ6WnBiCHS+XS7Xb7QuGj7hNTCOWc0/JC9f7HwlGJhiZYGSCkQlGfhwY+a7jg0gK/Q0wkLw9Y1j83MsYeEukWw6sD0uEnBMWgbdXAgYHS9wYRqVSsc42qVTKNurzE805PSM2nU5NJsDns9msGTja2Ewmo9/+9rcWzCuVirbbrXWM4n4kmb6bYC6dC56n06murq602+3UaDS03W7V6XS0WCwuOiZhlExusVi86M6z2+2smPa5Lptnh/3C2Nh5fbVaWRDb7/eaTCbGYnJdmKfdbmdA3u/3JcnYKYKFtwfpzALW63UNBgMrTIVZIzBKshahnlXhOR4fH83hYFkajYZGo5FdC014FEXqdDqaTqcmuUDHm8mcN6VMpVLqdrvabDZWdM650XOzoSF6aHauZ4xpn0orXwAJKUYQBMbgsgkl2ubBYGCF6t4+kOkAmDBokkymQ1CiyBTtOTb18PBgcgNkN5PJxPYa+eKLLzSfz/XmzRv99re/1Ww2M3/abDa2iSNz52tOADjuKZU6t/S9ubkxyUmxWNSrV6/0008/qd/vKwxDDQYDiwd030L/jv9ja6VSycAM1jmfz5sEhYTwdDrpxx9/1Ol00meffXbRPhjNNvP+448/GgMFAI7HY2WzWdXrdfMhpD9BENjmlQRfErblcmnMMskH9SKTycRkR4XCeSNLLw3CB0n2YGdh57EVkg2eCTskIUIC4X2NJIXYB+MXBOe9gohvPBvj6pl/GEDuFWaRGErsZH6S4/2OBCMTjEwwMsHIBCN//Rj5Thz48ssv/+KH3nX8/ve//7JWq1283TFoBDQOBuH5zcGWIFvwmnRYVZgXjM/rjRk4Jt8X2SJxoJUtE8eSJFKMXC5nnZdgVQimBAY0qXEca7lc6pNPPjHn420bHTW63sViYWzk6XSybkiZzNMGf61WS3Ec6/Hx0QL7crm0vU54dpbO0dTv93trHwvzAYjDVO52O00mk4s52O/3tm8Kb+20rQVEWTpFSlEoFLRardRoNEwDLD11cPLzjb4fpgLJAGPk9ekEMsYQFtDLaqgnYLkaxomuQs1m08aNpXOA73Q6FxzD0OVyuQv2iudqtVo2jrCLBG/si3tEvgBAEBQIQv1+X4+Pj8pkzl2F2JemWq2aPdNSNZPJaDKZ6HQ6GWM6m81MVx+G4cVyP8GSDj3dbteW77lf/s2B3CUIzoXeh8NBg8HA9hjZbDaWvNTrdWsLC5sH20tyR2BFEjEej/XixQtjjI7Hc0HsYDCwoI7d5vN5q4GQZP7x8uVLq8ugwDyOY/V6PZO1FAoF/fjjjyoWz5uM0gUMoALwYKqQqMCoI1UJw3P3pGq1qlQqZUB6dXVlKwtIGkgkqZUgcYJNg7X3+5CQlCDTaTQaF+wYYwJY40vYEbEUm2KlYDqd2katvk03TDgxjVbFJAE8N2NIks9zSE8b83o5GuwfTCs2NZ/P77788sv/9T448TEfCUYmGJlgZIKRCUZ+HBg5n8/1D//wD//z57Dgg218LP357stoHTFsJBEswyGJQO8LOOA4GDjsChPhnYTl5NVqZXpKlvBZ5stms3Y+v2zLWzjAxLIlwZwAuN1uzVAajYbJJv70pz8pk8noiy++0MuXL7VcLvXw8KDZbGYGuNvt9PDwYBrsTqdj7JCXBwBc+Xzeroc0gaDL8juFojBqYRhap6M4jm2PEkAXbT7LrBgIHWHQSFerVdN1E3wqlYqur68VhqG63a4tb3NPSE2kc4ADoL38BaYUdpExhq3xxamSLJGo1WpWDAuDyXE8Hq0Lz3a71WAw0GazMTkLCQSdpBiLSqWizWaj+/t7W0pHduEZxCB4agvMM0hPbImXUXQ6HZPQ0MkIpguwhNVdLpeaTCYG3pIsqI/HgMI1tgAAIABJREFUY2sZC1udTqetKJ0kxd8vQaBYPO9vARiRvJCAlMtl08d/+umnBs7dblep1LlINwgCq7Ug2CO9wZ+QlBCUGF9kBoAMQI+PptNpVSoVNRoNk+mcTicDW/bxqVarxkah/SeAP+/cxD4vhcJ5PyCu12g0TFve7/dVqVRMh10qlSzZ8XPG+ZCD0J0rDEPztW63a1r0VCplCR0AvN1u1W63ra4Exh1bxH4BGdhoEhbmyhdtw3AydsRPzkXcIPhzT9g1MQFdPIADI++T9SiKbP8hAA+7g831MpLk+OuOBCMTjEwwMsHIBCM/Dox81/FB2sKz7MYAEdRxeiaIQOZ/z81yLlg49l6QnvrfE1gx0Gw2awOHcxQKBet8xFsuINRqtXR9fW2DR4Bg6ZLJh53hfvzSNUaNwfzpT3/SaDSyjjfoY9GioonlrRvWBTDlur6dLWMKc+CDKNpnxg9mhvN0Oh2l02nd39+bvMTLDfhaLBaazWZ2nTAMDUT8vVE8SxEzb/gAk2dbYSZJMAA2HJDPAIAUxXa7XQsSyA9gVAnksH0EPNhKbIcC3Pl8bswjLU7RX9frdWMzl8ulSUgKhYJGo5EymfOGlcy9X4aXnvaWYQxLpZJWq5VteFkqlbTdbk2uwP3BvrDkHQSBXr9+rel0aqxTq9WyBAB7o0MTMoxyuaxaraZcLmfM083NjQGUZ0pZwl8ul2b/tGoOgsCe0xeWw2hKZ1aNa1MwejweDdjxQ/xnMploNptpPB5f1JRgI8xfoVAwVpKkA/ter9d2TpKaOI51dXVlEhp8jODMeMFCUTNB4rVcLjUej5XL5QycSBSQAsRxbEzvYDAwlhcdPDbLvBKcK5WK2VGxWFSz2dRoNDK7ZZx8wwFYaX5HckXsIuHxhb5IS/BzAAJpF0XN7L2DbyKxYW5rtZoBMB3MYPiC4Gm/GcAcVp2VABjF5Hi/I8HIBCMTjEwwMsHIjwMjfSx6fnyQFS7+z0W5MMbob1iSDQZL9xRU8ntYVLSjXnaB1jKVOhfP5vN5cz
AkCNKTZAOmoFQq2aZwsIUsb3vAo6iQA42mfzOG6Xrx4oWxX+wfwlL74XBQrVYz58CIaCu7XC6tFecnn3xyIePAURkfEh0/nhgsY+W1u+Px2AwROQMBEL06zouEg3mYTCbGDMGy+mJQWJznS8acc7/fW9cfD/owWovFQuv12rTYs9lMw+HQAoO3H/6PHtwnIbCGsFLowwGb+XxuTLAHOFrTopHmGsfjea8U6YnlhJlhHGGTkfGwzL1erzUajUynPZ/PbZwJ/KfTyTokwZ4ej0criJZkrJ1nvOfzubX59fIYgJL9WJDV0PkLHXM6nbZ9UggeBFYSB4IxrB16aP+MjNV2u9V0OtV4PNbhcFC73Ta2uF6v6/7+Xul0Wu122zoRLZdLY7eQsURRZAwViRfjE8ex6vW64jg2ffbx+LT5K3GGgwQMH+P/dOuiuLler6vZbJq0ZLvdGkgNBgNjBdFs88yAIHNQqVSsEBeby2TOXY/S6bQVQnv/9Pu2YF/EHACOxAj2HjDFJokH+An2GMexBoOB7aPCtSlef97tC0AB5H3MJfkBSABp/J0YnRx//ZFgZIKRCUYmGJlg5MeBke86PsgKF4yAZwIYDByRz+KULJfC6DCgGC/tYGGVeNP1Mgj/b95W/QPPZjPVajVjmGazmX744YeLnxG0uB+AxMsyCJDL5dKW5wGiRqNh2me01RTtUviJw2ezWQ0GAyuKjOPYNg9MpVJWkMnyKQ5EUMaoYTJZOocVzWTOhYSMIbpqmDuAkWX0MAzt7wh2Nzc3Frjp8pTJPHW+gpVAt8y88EUwwuhns5lOp3MxJEYK+Nze3mq9Xuvx8dE218NpoiiyIAlQwXpIsuvR2YlrorvHNuiohV30ej1Vq1V7ds5F0GPn88PhYJ+t1WoG4ow/gTKKIjWbTVUqFZNvSOfgy/iwxL3dbtVsNtVoNC4SJ0APXTRBHOaGZXwC1W6307/9279pt9sZs+UZZoIpoMPzl0oltdvti+eez+d6+fKljsej7fvBnjjIKPBj/LBararZbFrxsZcv9Ho9k3oghUKHT/BleZ/AB8Ax1tgpMQNQQUsO+wfbizymWCxapyOkUQTs1WplyQLtsBeLhV6/fm1xa7vdGotKRyTkLmi8+Xkqde4O1mg0NB6PLbADZj6Z8omXv0+emwJ5/AoJBCD1nH3H5rEfYgbjxfewnPgN40QiDhNIkupBmjEE+Lj2XwKU5PjzI8HIBCMTjEwwMsHIjwMj30VKfpAXLpaOWdrniyVyQIM3RQYJUGGwvA7S61rR4RLsCQw8OOwe+lkGLp1O20DG8Vn/7ltQ+sJJWEK0qSyL8nPe6pFLSNJ4PDbWi24+dDfBEPk/z99sNq070Hw+V7vdVhzHtszNG7zXoMJ0UkjJGztMAn+DxrhUKl0s3aNh5m95Q2eOpHOXn+FwaEGVc/slexwOFpNlZ+/EkqzAFIMmMWAMUqnzBoej0cgYCcAJoGP5lqVm2MtUKqXxeHyhV5ZkbAtOjxNNJhMr8mYJHmZTkn1fKpU0GAw0HA4t8BM0CKTSuV3rfD63OgGvJ4b9bDab+uyzzy7GCykL9QfYK7IbHBv2lo5PjBdF6TzrYrHQV199dcFWxXFshc0weNQwlMtl3d7eWiCB5VwsFhbs0IITcEhGSAZ8AoIPkzwSnIvFoiUMXoedz+dNUoI/eZuHqWKu9vu9yTnwU2wR0IQZJngzxzB8sMTIZThHHJ/bCff7fWWz58J12GSeS5LZBYkdXbKIQ/g20izAEwDg77Fx/k8yiI9jZ3Ec20aqzJ2X8D2XdVEUzPyQjMLOsaKBfXlWnDln/LAfknu/cgBb+JfkEsnx80eCkQlGJhiZYGSCkQlGvvOFKwiC/zsIgt67PiM9abZZ2oTp4O0PcPEyCRgaDPG5vMFLJOI4NsYN7SQSBzooMTgUccK0sLxMMGQJEcPyUhmCGcbJvVO0BzME0BHI2UfCB2uCRTabNYP2S5CLxULz+dxawqKDhmHwB5Max7Hm87lp4NGs+jd4ijdhGAnUODRMFDpwAjBFyZKsEFrSBSDjTO12W/V6/WIjPcAR8KfwkMCA06D1nk6niqLIOuHA3MCkwWowd0g6YBOjKLrYIwL7gd3CUVmiZr8U5AwESqQwMHDYI1ICpBQe7HK5nBqNhsrlsn2OLkrFYlHD4dDY2lQqpeFwaMFJkur1+gWzikwijmNNJhPl83ljdLGpbDardrttxa0UOcPwMu9IYxjLMAzV6/V0Op0sSKFZJvi9efPGfIgvAhnPjz/iE7TyLZfLZnueZSdgwwh6wKEQHqYW6QYyI3w8jmPbB8XbF4XSgBJxAMBjXGD0Yd2QMBEssRM6PrEvjvcrYhAst4uN1loZ3TuJLvFEkhXd+8SVuWAco+hpM1zGDuD0yYIv4GV8JJkv73a7i/oX5g3m9Pm/kREBxNybj+v4lY/ZyfF0JBiZYGSCkQlGJhiZYORfWt2S/sILVxzH/18cxw/vPIOeujB50PCgABuw3+/tDRm2Agc/Ho/WD599HPxbpp+k0+l0AQL+LZQ3W4rlCJzs/cASPgPOxDIhDBgOVywWbZCRTaRSKZv8yWRi3Z0wyO12e1GY6JlMWEXpiTH7/vvvDdhwJJzFs3ncB+fzAJLL5WwPBwI1IA27SIEyhk3g94wHQOmTAApZAR/06jg7RYkEnyAIrKAUh6AYlXMQyAAR3xKV5/ZARuBFz346nazlKVpv6akugU0gCXDMCVKVKIrM+Rlj2EgYDJIf2BiCFfeKU3qZA8GOQtH5fK5SqaTdbqfHx0djVZDSSLJAiF2hlZ/P55LOIMQ+OfhBLpdToVAwtgf2R5Ix0J1OxzoJ/fjjjwZyPCuMItKObPbc8hYGmgJxfNcHe4IksgCkKt1uV9Vq9SLwebuAIQeEuRfa7JJAlMtlux4BFjbOy4MIhF6uQA0C+nTGFc06No68CdYb5heARp7BPcCycW5YWHzBJ7JcD5v0wOAZML9CAOvO2PqElO8ZC8/CcQ5AyNv1ZrMxn4eR4x75LAkDsZN5Ye6JP8nq1p8fCUYmGJlgZIKRCUYmGPnXYOQHkRTiZDwEN8X3GBzB2y93crNMGMumDAZBhIcm2PO3nnFjUPwk8XMCDiwaTEscx8YGAkphGNo9cj/c02g0MmNhAuhyxHPDkAF46IMx0EajYcvk3333nd0butTNZmM7jZfLZTUaDTUaDbsPHJVla9rmLhYLHQ7nTQ8JeDBUBH1JBggE7UajYSDIsjDjRrE284dUIY7PS7sedChqJlDTtQfJAEvzgBhadpwChqrValn7WkCfxAPHrdfrpnuGyUPfzv2jLaYdLhtmsnM4dktRahAEurq6kiRjfAA05m+xWJgchGsAtATcWq2m0WikxWKhOI5t40Xmmb1sPBB4MIXtJDiQHPnNT5GHsMcHshgCpiTriuTb+7KvCmxPOp22+adQmHEEiAHX0+lkUhq/+3qpVLoo6idRRMufyWRMFoEfU0cBGOPPjIEHYu6BuQBUuRbPhh3RycgnsnRAG4/HNoaADUzm6XQyZtEX3PukFjY1n
U7bmMGSM05ev059AQEamwOwJNnYHY/Hi31LYM0k2XMR24gXjBljge16xhVQA8z5HHHudDrZPcJQepkEzB4xLTne70gwMsHIBCMTjEww8uPAyHetcqX/w9+8xwHjgdMRSGFveDjPSvEGjiGdTuduSjBpfhO7KIouWlz6ZUXOyUDgUH55D6YCY1itVlqv1xc7uxOYYRVYKgUk0LTjpHQ1obiTyZtMJhcFx7whc49cB/bG65Ln87mazaa14ywWi2q329Z5JpVK6fHx8cLg1+u17c7+6aef6nA4mDZdko0lhoFzEJiZH4Aljs+yBlrHUjAKYOOgg8HAAI7lfIyU1pxonXFadNIAEYWkMKQwlDCyfnz4HWwl9kBgORwOJteBYWXu2M9jOp0akBEED4eDtXGF2UGTTIcoEhYYPJyLYEwggX3JZrO6ublRoXDe1+L169dmkyQf6NDRGBNYqAlgaZ+fE3wJzoABAYz52+12xoTBVMP0cX1snkALcw1DRHchwJTlfB+kkAkx95z7ue8gnWFjQ1qy4gckWLDhyCfoYFWr1Yw1wm58bQGrBMhpsBEvd+A6PimiUxS+7SVNsKveV7LZ88aNMH2r1Uq53HnvluFwaIXrPviT+B0OB7sfxokEhOfCtwCRzWZjoE7y6hNYQJC2wcRXNPvIlbgm+7Qwn7DjPgF/vkLgE3NsnntOjvc7EoxMMDLByAQjE4z89WPku1a5PtjGxxj28x28CQoMXj7/tO+HlygwsAwQBo+2FebDDwJvzZJs+ZPBg93yS5Qs0QNagBqB3TuKf/MncGEY9/f3enx8NGOklz8TMBwOVSgUTO/Mc3Ev6KrZSZ0uNcViUdPp1DZ+hPUiABI4Adh0Om07wqdSKdNhPz4+qlqtajKZaDgcGmOIZjmTyRhLAwNVLBat8JHzU9jJRpVsnAlrx2d4LsYjm80a+OEIzActZPkZ80JQgHkjwDwvrOZeCXYEv/l8bkwrDotTAz75fN4Kk0lksCmfqPi9IB4fH40tpLMWBeMEaB/EGIdCoWC7yO/3e9VqNVuyxsYBL57LL4uTmOFLvhPSeDyWJHsmzo12frVaab/fq9FoGBvD/iT4CfY5nU61Xq81mUwURZHJKZCXUPxNkG2329put8bMMlc8JzUIsFMwXfgAMgx+zv/DMLT2ufweP43js2SnUCiY7/hz4pfMO4ksNgariGwgnU5rtVpdbLaJvImkh5UGwIFicubveDxaa2nikS/kZX8f2j+T5PA8MGeeua5WqybrIknmHhgn4hVMHufGfj0r7VcdvOwFX/TMO7Hcr5IwFowz50+O9zsSjEwwMsHIBCMTjPw4MPJdxwfp8evlDyz7Mshob2GsuGnP0HiDk2TaTgaChyQIMIAYEA7H0h8sAAMFoyLJgjF/T2Djs0gEcEwMlWcKw1CNRsMKPuncwrP3+30rsM3n8xdjwbUJ5MgaaBVK29Q4Pu+dsVwudXd3p+VyqVqtZhPrl2nr9bpms5na7ba9uXc6HWNbqtWqer2erq6u1Gq1VCwWjZ0AzDFCmIMoioxFAlCRD7C0ig48iiKVSiXbxdwv66Jfhtktl8tarVY2bul02kATsObzLNUXi0ULwtgYDApL0WysuN/vNZlMLhISmKUwDK3gE50zzCLLzgRzGGWCB8EaBgNb8gwHthwE5w0T+/2+MW+/+93vVKvV1O12dTgcNJ1OLQDwXOzDgYQA+4dx6ff7SqWedjYfj8d2H8+LPWnDDLuE9KhUKl3se8O8eIZVOjPSsE88m2dRkY7AsBOUsR/06vgs+2rgq1yHoAX7TyKBXXL+m5sbpVIp20ke2wS0ua6XogAGPsBio8w3NsIzkEyQ2JxO571JYNe5n/F4bMmzByG+kF7hj5wPf/HzyvwT/JEVYX8wzd72sFNiFp/x7CAMKXOIHz9f1eCLmEmC7Ve0AJJETvifPxKMTDAywcgEIxOM/Lgx8oPUcGEMvrMLA8QyKDdHMMJgGHRJF5pKr6EkmMEG+TdL3r7ZEM4bEcuo0hNAecYP3SYFrwRCzpHL5awoE4YC/SnLruwQH0WRHh4eVK1WDQiOx6fOSHEc2+8wdpYzYcVwUFrp1mo1bbdb3d3dab8/b+hHpx+WfiWZA7PnAUzTy5cvbed4H6BzuZwVZ6PfHo/HFjCm06n9niViD8Ze71+tVtXtdrXdnjf7Y24rlYoBBSwOTsX+LIB8JpOxvT2YT9gP5oGxAiCZv/1+r8FgcME6sIEh94CjMgZs+NjpdIxN3O/3WiwWlgQQHKvVqslNfFE38xWGoer1uv0N0odaraZer6f1eq1Wq2WMFMwKOn4c+XQ6GZuIFIbkw18/DM+7oZNcEADY5JK/bzQaKpVKajab6vf7xhRVq1U7F22aYX5fvHhhTCqbKgLMsPLYng++0llCJMlqLNgsMZ0+1wgUCgW1Wq0LW4Kd58AGsJkwDI2JZCPFVqt1EawJyvgAAd8nqTDz0jmo0pWNFQBiEgDiE1aY/clkYgEeu2y1WiZ74VrIX3zXJYJ6Op02ORgSLb4AfIAYHwNwYBT5PPGVJI84DMB44OD8PD8AyeHtkL8nxvJvmLtkhev9jwQjE4xMMDLByAQjE4z8xTVcXorgNaYwb2jG/SDwpslys2dtcDQ6nCyXS9udHIbPM4MYMIbvtbIEfSQQp9PJghJBCwAjUBGkcRze8ikmjePYHDwMzzuP42zD4VDdblf1el3L5dKciCJPlvWRIVSrVTOSIDi30Gw2m3YvGGwURQZ4SCgKhYLpx6+urjQcDu2tnyVkNps8HA4WJAFQv2Eif8N8cR7flnWxWKherxsr6KUMgA5slyQbv+vrawsYq9XKNgM8Hs/drSgShmXYbDamdWcecSp0+uh00WHTVpU9LnAgAj8SERIJlrr7/b4VUfOFpIb9Q2A3YWiYGxybMeFeWIqP49j2k3l8fFQ+nzdmjufmeqfTyYIQzB1jBDtFUAeEqC+YTqfmaxyeaWPzSZI9L9fAH2EN8S8/5gQQ/DKVSpmun45pSCs8K4gGv1wuXySIsK9IQthvh4TTd01iDNLptDGqHmiw5efSAthWAO1wOGg0Gmmz2Wg8Hl8wigArCQWSER9AiQflclnT6fSioDwIAv3444+SZAkFtsxYc27GmJ8zT77QFyDi2QA24tzpdDIG0ssZYDAZDxhWz8pyPs/UARiSLuYB9hhtPayjX2VJjr/uSDAywcgEIxOMTDDy48DIdx2/GD1x2MPhYPtv4ExIJWD1WE7c7/fG+BHIMHDe2LPZrBXdbrfn3a398p43UCQHAJqXYzC4GCCT4XWdvP3DGhHoB4OBJpOJ2u22MUhxHKvdbtteEOx70Gq1lMmcN41EQyqd95OAlYFF4G28Xq9b4ehud94RHRDCKJFgAGKffPKJisWirq6udH9/r3a7bfsUeK1uGIbmyBRBExRub2+VzWbV7/eNDaRN6nQ6VT6ft2JpnDqOY11dXdncMfckAux+jjHiMMgq+DyAxblhMn1yAGhjMzgTrFY6fe4a9Pr1awN15oZuQCQkyBPYbI8ldgIGwZ+aCcCU
ILDdbvX999/bNWDngiCwugTkJ4DOeDw2ycp8PtfDw4M+/fRTY4cptJVk7JIkNZtNW4oncWDsYZGoeQC4W62WdfhivH1gJyADwNQgkNC8ePFCp9PJGFuuS9BG4w3DBVPJPNBGGDY7is7FtpvNRo1Gw56BMdvtdsa+AngUXTOXsGrYYqfTURzHGo1GtrdJqVQyEMHeSK68NIGagl6vZ9KaOI4NGL0Gng0YfVtcwCgMQ63Xaw0GA7XbbQNxmNrlcnmhJU+lUnY/nU7H5DnsAYMEA/8i2SZ+EbyxUYr/vXYekMDXiC/4If/+ufPxO66NnyFF4TrYKd/7pCU5/rojwcgEIxOMTDAywciPAyP/j65w+cDujZ8L0+2I5Ur+z/dxHNsD+OVeBpo3eQbCa6j5OQ7N7wjwaL7ZGR3tdy6XsxavgBAGwyDClPAmW6vVVCqVtFqtNJ/PzclyuZxms5kOh4MtQ/L2zaSPRqOLpf1c7rwp4HA41Gw203a7VbPZ1Hq9tnOhpeUNm/aftVrNghUAy33DYCA7SKVSxhJls1k1Gg0dDge9fv1aw+HwQkKBrhemlPmBCaAjDkwRDIIkA+p8Pm/LzzClSCdSqZRJEmD6OD/nSafTJglgGdyzFzg43aVw5kajoUKhYOODTIPagdlsptFodNE+FUal3++b7IYiahxzuVxe7MmBfGK/31vwoIXq9fW1RqORgiAwe4/j82ak7O1RLpfVbrc1GAzsWQAKkhyWvUnKarWaBWmSpGq1qlQqZQDgC8RZ2gesOScMJgxaKpUyGctms7Hl/fF4bPUX7EnDdbEvH5QI5nSf4jnoTLZcLtVut20eac3c7XbN9vDfQqFgG6QiaYBNa7fbCsNQzWbTWC6ecbfbmW/7IHo4HIy9lGRBXnraMwgmDhkRch4Y52azaZtT7vd7XV9f682bN5b80QIZVhObx+8k6fb21mQ8jJGPc8QNpA/FYlGTyeSiboKATkxk/IlPzA8xFMkIfsi5OR/XY275e9hYEnEvXcPWkuP9jgQjE4xMMDLByAQjPw6MfNfxQfQhsDcs6aPVZDK5Kf8ggASTyySw3M1EeA03bBS/44FhujAsBgXnYakaNhCDR+7Q6/XsdyxfHg4H5fN5e+sfDAbGvK3Xa9XrdQvcDH69XtdgMFCv19PhcLCN+dB4V6tVY03y+bzu7+91OBzU7/d1PJ47N+VyOX3++eemt+cZTqenQmW65rDx3Xw+NxYgCALrklOtVhWGoebzuQ6HwwXT2Gw2rbsQe5jA8tDOE5Bip3TkK6VSyQASZpClceYXnT8yk91up9FopOvra3v+6XSqSqVywSBQFErXK9gq5B2FQkG9Xs/2ygBQCCAsD1P4ikMC5MVi0difwWCgMAzV7/fNThuNhoEnIOa7YTHmSGFyuZzu7u7UbDaN2Xv58uUFK/l3f/d32u/3xgwTCNjokqV0QBz2EC03zwn47nY7PTw8GBPU7/fNpyRdaK3T6fRF56fBYHARICVZZypJtveND06ASKfT0XQ61WAwULVaNZaLwA+oMfaz2UwvX75UpVIxuQ5F74vFwgrBc7mcbdRJQlGtVm0PFp4ZbT0Ja7Va1c3Njd68eaNisahPP/1Uq9XqQj6BFGm9XqvT6VhsoM6BRGo6ndpcPz4+aj6f69WrV4qic7vt6+trA0PYsZubG9tYFPaSVtWAMcH6dDrvz0LtQLFYNJsjFpJgMRf4BYABICN/4W/RzcPcw/Tyb5I9it9JIpBL8TKADz2vG+LfJCfJ8f5HgpEJRiYYmWBkgpG/fox81wrXB9GHeNaDgkL009wkb54YPYBDgK5UKvZWGwTnLjYEA9gLgqWXQ3Ct4/F40RYTZgAtL8wfDsCy8Gq1Mj024IDsAfBj2XU6nerrr7+24k6CjST98MMPtrHgcDi0VrbS0y7oktRut5XNZq0LE+AHILGkjkZ6sVjodDqp3+9bUeXV1ZUKhYJ1IioWi3a9TCZjhceedQEsNpuNms2mrq+v9erVKwvuzFWlUjHWpdFoqNfrGTARuKUnps3rpVkefv36tXUZgjEhwP2cwcNg9Xo91et1M1rPWsHChGGoyWSixWKhKIqs4NnbgHS5gV0YhrYzeTabtXqB29tb/cu//IvCMFS32zWHxvn4O+51u93q4eFBh8N540yCY71e1+3tre0Pg7QAx8fWCW506EIWAHONhAbwmM/nWi6XFqgA29lsZnZD/QIyEMC20+mYRht2qFgsqtfrmc3Trpg9awg2SEk4F5207u7uTEIBs5nNZnV1dWVBEGAhSchkzpsz4re5XM72tnl8fNRkMtFkMrEaA1j2h4cHjUYj5XI5/c3f/I0Feuo9kPb84Q9/0GAwMIkPRcOffPKJMZl0LCMZC8NQg8FAj4+PkmQSJeQQknR9fa0gCPS///f/1jfffKPJZGKscRiG+uyzz4zNv76+Vj6ftz1XpLPE69WrV3r58uWFT0dRpNFoZAnhbrez71nZQLpFDQFsqV8Zwa98soDUg2SJYnjsj3jw/LzUREiyVQySfHybuIn0KTne70gwMsHIBCMTjEww8tePke86PghdudvttN1ubfd4dJgUP8Kyeb0nb9XoPpvNpjqdjskKCDgs96K/5u9gWjFSpAuSVCwWzUlgk+I4Ngaw2Wza0iEggr6aSWPgYJ/82y+Tvlwu1el0tNvttFwutVwuL5acCRq8FY/HY9vAkG5KBHqWiGGrcrmcLa1WKhXbiZ0xY5Jhwnq9npbLpfL5vI2zlwYQ4AAZNMytVkuHw0E//PCDMUk4yXBP0ys9AAAgAElEQVQ4VD5/3nkcPTBHp9PRer3WdDq18eH+cSq0wblcTs1mU9Pp1DoFUQS53Z7b9hJkMXhYE3T1aPp5Jpiy0+lkLBhByndKgtWVZA5BgP7d736nh4cHffPNN/r7v//7iwJzOlIh8eAZ0L7DWt3f3xtzNRqNdDwe7X7i+LyfzGg0MmaW3/P3XhsMiMKeLhYLYznRKBMEYUczmYzJbJrNpiqVijF9vtA1nU4bQ814khAxttgWRel8DuaIoMS9w+zW63VLCJfLpcrlsm18CqPl61CQO6FxB8S3260xrJVKRe12W3Ec23yRhAKOr1+/tvsNw1DfffedjsejPv/8c7s/6hPQuMM+4++SNJ1OLcnNZDJ2/dPppF6vp/v7e2MfqeuYzWZm2zc3N7aHC3uxYIuw9STDBHHAg2QFYCEBIt4R5Dn8RrR8Br/gnr2+vNPpXDQhQDePn/ESBSvntezcN3UZ+FNyvP+RYGSCkQlGJhiZYOSvHyPfdXyQF64wPO+74Q2VzjoMEEEgnU4bY0OQp1Cv0WjYEr5/WN64Wb4HnHg4zs+mhlF03k+BwE9QIwCNRqOLvSsOh4MVaTJoLDNWKhUrGpzP56pWq6YhZyn3eDyaoxJMHx4ebB+NbDZrjsHzrFYr1et1HY9H1et1ff/99+ZoFHsSvNDM0gmFfSP2+71tZjifz3U6nTs2AU6wWujTCaqwEASR/X6vXq9nenWek+d79eqVUqmU2u22sVAERvYs4b7YNDOTydgyOTu
n4zSSTKcehqHG47ExMjCryBHK5bJms5m1D2WZl3miVoCAyzK+L54kCclms3Z96hn43Y8//qhOp6OHhwcLfARcwFLSn3V48rUI4/HY5ubly5f66quvjEWm4BgGGz1zFEW2oeJ2u7UNG2EZ8QGeA/sgYGUyGU2nU+tchZ/5pXZsBfkJjFoYPm1ISHKGHIc5poUssiNsQpLZDvIhahl8ggeQ0akMxpfPslzvZUSwXLBsAMg333xjzNxgMLBzBkGgyWRiY/fq1SudTifryFUoFDSbzdRoNPTpp5/q4eFBqVTK9OUEYgCO8YXV7HQ6xvAh28Bu0M6jj6eoGOkDYH04HKzQPZM57z0ynU51d3dnbDsMN53PsJMoii4SQRJsEm6SRcY0DEOrsSAeI39iLn2tAbUmxGWSOZ/EY/PJ8Z87EoxMMDLByAQjE4z8uDHyF0sKAQUuDFvAmy3BNAgC0xEz4OgjJdmbrte2E0RhS2AbkDrw5rrfn9s/Elhgg3AujNgPCNre0+l0Ia9gApE0oLGGQfMMTxRF+uMf/3geyLcaUpbeKbxFXsBydy6X02g0MnaKoBeGoXWPCcOzbh52b7fbmaQDAF4ulxYghsOhGeBisdBPP/1kQQiGaDgcWvcZlv95k5/P59rtdraXxf39vb3Vo7UPgkD/+q//qtFoZLrsm5sbTadTDYdDDQYD3dzc6OHhQUEQWCeh5XJpMgPqC0qlki2hw/7QladcLqvRaOjly5fq9XoGLq1Wy1ib9Xqtm5sbTSYTqzuI49iCIAzyfr/X3d2dLSmjTYZtgoGpVqv64x//aPaz2+0saBQKBWN8COowaVyHZfwwDE2njxSIAINfeNkJRd9xHKvb7RoQE9Qomn14eLDgCdPdarUkyWQ9yCx8APetaLE97jGTObc0JsmCySYwoenHZo7Ho9Uk4O/4AtIfzhuGoRWgM84wR4AH/4fN5tq+m9XxeFSlUjFfnE6nyuVy+v777/WHP/zB5Bdo8Un8SNSazab58HQ61VdffaVCoWAyk/3+vAlop9NRqVRSu91Wt9s1qQeM/HK51MPDg16/fq3RaGSJHXvYwBI/Pj5exDbkC/7ZkG4Q0F++fGkSikqlYvsKIW+hjoGkkFjnpVxIbrgPgA2W3jN8xDdfC0AM9jVa2BFMHvbhGfzk+OuOBCMTjEwwMsHIBCM/Dox8lwrkg7xwccMs4xHk+D1vrAwazsiN0uUFEJJkRaA8tA/wBE7e6mE5mDwM8vHxUavVyt66cRDeXGHs6OACULF0TQvfWq1moMl5pPPye7vd1maz0WKx0HQ61Wg0kiS9fPlS19fXph3PZrO6ubmx4EdhKQZxf3+vOI41mUz08PCg6XRqy/QwFKvVSre3tzb5/B8H8su3d3d3qtfr1oWoUChc1ABghAR1ghr64+l0qm63axp8jIlAOxqNrHAVQMZRl8ulZrOZMpnLjSdhgNB1M1/X19dqtVpm7LBldDlCekLgQZ+ey+X06tUrFYtF9ft9C1Kwf2i2PRPhzwuLSUvlr776Su12+0Iig24fVgVtL4WnyC+wD+mJVabwerfb6f7+/qKQk2J5EqfpdGqMG5IHZAT4B3PIWAEq2WzWbBjfgZmTzgA9Ho81n8+tHS1MEcvjMELM1/F4tGujQ6fgu1AoqFqtKp0+7/0xHA5N6gJLW6/XLeHLZDLGTMLscU3qL5DPdLtdq6HI5XJ68eKFyUJ6vZ5Op5OBAgwxwESh89dff23SJZKFIAhsDtPpc3e2MAz1u9/9zsab4M08EsQp4gVYoijSfD7XaDTS119/re+++84kLIANAd8XL+fzefMFVgqCIFC327XVD6QhxCV8j8ScOMk4ezYZkC8Wi+bnSK9Itr18iHoHEjzqeBhvL1djNeMvdWFKjj8/EoxMMDLByAQjE4z8ODDyXS9cqS+//PKvxY2fPX7/+99/WalUTL+Nc8IywcIRoDEwmIRsNmvsAG/stM7F8Vj+Qz+KxpNghdNhlLxlEpgYHAYbloQAg/Mz6JIsyKMX3+12ViyMTIDleQp+YR5xZDoTZbNZLRYLW95lgh4fHxUEgRqNhu7u7qxQFe08RgETxPPlcjmTawyHQ7tHDCqfz1snqPF4fFEk+1yuslgsVKvVlMvlNJlMLIC2Wi0NBgP99NNPevXqlR4fH419Ywl9MBioXC4b6OKoBOJOp2NzlUqlNJvNjDWB1SEgSDIgotNSLpezZX2KklkCRppBV55cLmeaY9gqnMOztnRy4vlhh3u9niaTiQ6Hg90j7BrL+pKMwaWYlvHk94VCQfV63ZaywzDUaDSy8SkUClbPgb2zJA4jBgtHJ59s9rxHxn6/t7nnfAAiCYsv6qUbELp5xo97pkYCeyD4kGB4mQZ7plBki2/ClvLsJI+LxcISE3ylVqtZh6tyuWzMGywfCdbxeDSbTKVS+umnn2yeb29vDUjpJBQEge19Isme9/PPP7cW00EQWBc04k4qldLLly+N5adwFzafcYHNy2azur6+NvCGvW00Gmq32/rkk080n8+13+8NKEhssSOKuWHzuZcoitRoNLRYLHR/f29AQcIL4ybJ2k8Tk2ClOXxnJeYIzTsJPKBO4kH85Yu/41wk2ofDQev1+u7LL7/8X78IOD6iI8HIBCMTjEwwkmdPMPLXjZHr9Vr/+I//+D9/Dgt+8QvXP/3TP33JGyRBzzMHDAJOjoPBvsGIIXvg5xg9xsmbJiwGE+A1lgRelnG73a72+70qlYr9LZNHtxjYOAyRpW/aw6IbZUmeTj8MOEWXs9lMq9VK7XZbjUbD9J5MxHq9Nt030gCC6W9+8xtrnct+HGxOCGiMx2O77nQ6tb00CP6wD8vlUsfjUS9fvtR0OrW3/NPpZOyLZ6MI/hgOe5nAyHz++ed272j6mdvNZmNO32g0LIhLsr0luD9kBWij9/u9tSCVZAkEtkMXIGwHx6AOoNPpqFqtarFYKAiCC2ZlvV5b0EUmgGSGc8LuAHblclmpVErff/+9fvOb39g8+V3esRFkJmjdpXPCw+7wXIdiXdq5BkFggOABFElPsVhUHJ87hdXrdVsaRw8dx7FarZbNMa2KwzA0jfPpdLLWvPgbxbHY+qeffqr7+3srQo3jWJ1OR9K5Qxj+TAJWLpdNBkB9APf2/7P3Zj1yZcfV9sqTJ+d5zhpYxbHVatlGw4AFX/kz/LtsoF/Y+hPvv3mvDBiwLclWD2yySdaY8zyP30X6idpJS4Tobt+I5wCE1GRl5Tl7x44VZ8WKCLT5OGCCKDoxsb7sK0yk9FCgzXnGVvg59o/ubi6w8d9u8Mj5931f7969Uyh0aP9MUf56vbbgDyaRIuRisXj0jLBYgBv2ms1mjxhvAtX3A0kyGPi7yWRiAfZ8Ptd8PreA1mU98Vewb/gdpCjvX9Tj8J3ufCECLz5HgCo9tNPFx3LhUwluWFsXZIIXro+7AowMMDLAyAAjA4z8NDBysVj8wReuH900g7dBQIHFhbnj7RU5gyR7a3elFHQugWnhoJPiI7242RzamqIBlmRvybAI8/ncBtOtVitzvL1eT6VSyZ
gw9MfoO0nhs/CuhpY3aNL0OE0YvidPnhxpO9HYJpNJcyCuZGE0GikSiejs7MzmV+RyOUshD4dDZbNZc0gUFfu+r6urK+12O11fXxuLBeMDS0VxZKVSUafTsXWDnQiFHgoO0ahGIhF71tlsptPTU+33B913uVw+YrDm87lqtZo5B9/3rVsTBwkW1w0EisWiyuWyOp3OEdDBhHAfSCfQi3ueZ4XbSGnQI7P3ODjfP0xmdz8LewyjJ8lAhxR0KBTSixcv9Nvf/la//OUvj9iNcrmsWOxh0Cf2CfOLfbqsHgwWTvb9QmOXSYH5wpY56BREr1YrG8oJsxmJRAzIZ7OZwuGwdbkqFotKJBK6vb2178R5Ij2JxWI28BSwZb2wKRyrq22GIURCg5NE+oDmnfMfiRzmjcznc2NM3boNGG7qg1w2HjmV5x2aDuDUKLhlf2Aa3eLk7777TrvdTufn52Z/2WxWL1++1Gw206NHj/TDDz9Y1iGbzdrZpR0uZ54gtNVqWbc5wIRAw5UhsJfIi1wpED4Klp5noSYCLfn7QTKAiQ3GYjFrgkBQ5fvHmnPOH8EvZ4z7w9/hr0Ohh2Jm9lJ6GNz6IblEcP3+K8DIACMDjAwwMsDITwMjP3T96BouvtyVKHCDSBn4O9cgcQ48KI6Y1DoHGYfPYXy/4whvqRg3bSfpSgMgxONx5XI5S1XTg58FZOPRlrJRACEOksPFYYcJWiwWptlGnsDBh8Vytftow3F4AOwPP/xgAwthcyTZ7I1cLqef//znViwqHeab0BUKpqfT6RjTQGqe9WZteU5Su7Bzg8FAl5eXymQyJiNiEB+djkqlkmmrcYywjPzezebQ3tVdY3fOAy2SXfbh98li2CuYB/YUp4cDoLAYRxqPx42t4QBvNhsLXHDgpJyr1aq18X379q3ZFLp/AiVXYsOhjkajGo1GGg6HFsTQ+tf3fXvu7XZrNgCTSSEz94zD3e0O3ZmYPL/b7axQHPvkTC0WC3U6Hb18+dJAdbfb2aBVHF00GrXZI/l8XhcXF3r+/LkFdsvl0rTynDFYXzftj2aZtL07A8Zd60QioXq9bjbmnm/AHPkQ9x2LxTQcDu1+OAvIR1h79gHttCRju+r1usrlsq6vr9VoNBSNRk061W63rY0zsh7YTUk2jBEQ3mwOhcndbtdsrdFomJSHs8Ma4CdYPzezwRoVi0WbQcTnlsulBWX4PM4/fo713e12ZlMACHbKerA/SJDcRgqAFffo3jvMnSSrO/C8Q5cqgDe4/vgrwMgAIwOMDDAywMhPAyM/9NL1ozNcHGo0mbzRszg4GzdtR0Gbq5HkM65+msXk97NJ/D4eFKfA4UELDRDhvBKJw8wKfi9FmejcATqXweOgwCziKGiXOZ/PValUzBmg3Z7NZiZDgGEqlUr2zBT9wcz1ej1NJhOdnp4aS8Ukcd74YXUwYHcAoAuGOHv+W5Ixc3RFgjWAfWBNaT2MHhjHNRqNlMvlVCqVbB8BEdrEptNp+3dYgs1mY0DBmg4GA0uPc5ii0aixnOwPxcM4FQAqn88frSF63WKxaOl7mBicHWDOPcA0knInMAFIp9Op1uu1ksmkdRAiCHDlMjhhimwBLoZ4jkajo/S89MD0xWIxrVYrtdttDQaDI6lFKBQyZkt60Aozx4P1ZH5GOp22Llek/QEU1tDdi+FwqHq9fuRkAX3Yyevra+12O/3lX/6l3YMrEfA8z6QDdO+iTexud2g7DSgSBLIPrBkOCpCpVqvyfd+6LeFMQ6GQrq+vVSwWTVrFGrAXpPoJkPL5vFKplNrttr0sMCizVCqp2+1aTYPnHWo1Tk5O7L9Ho5FJYjiLDFudz+dqNBqqVCp25mDKsAvWCrsJh8N2bzwvLCcdvNxaBT7rSiGwIaQc2DbZBeybM+OCD2vOHgMmrB/PwJp73oNunWcjgA2uP/4KMDLAyAAjA4wMMPLTwEi+//ddP/qFy/3i3W5nrAip5NVqZelTSZaWdnWrLJzL4GG4HFBYChaNzZB0pF2lEBM5BTpffha9OfdMAAE7wcJyT8gaVquH9p84PJw7bIP7u9gYV8fOYDnAkq4sjUZDvV7P0tPT6dQ07LzJw4QAppvNxpjFx48fWxp8MBgcpdxxqKSc3XvGEQPK7GWv17NORJPJROfn50eSCIwxHo+bE8RBhcNh62gEYGcyGSt2xDnjaPb7B210NBo9kohst1tjEJk6D/ux3R7mqcA0wa7hqFxAA1T5nRSFIuXhWiwOAybpJtVoNPTixQuNx2ONRiOFw2HTwHNIsTlsn6ADm5zNZioUCkokDvNSYKxgZ5gbsVwe2hpzb2jQcfD5fN6026FQSDc3N0omk7q/v1e5XDb7PT09NdZ5PB4bCFKknMvltFqt1Gw2j3Tw0oH5woEDKNKhjoL7JFXPmUwkEsZ2ISMiwIPRotaD9QJwCTYIHChYRgqD8wKcer2enj59qtvbWwOiXq+nSCRindCQjXBvSFi2261ubm5UrVZ1eXmp7XZrxfLlctnWkPujvTAF4rQyXq/Xur29ValUMiCEeSyVStb2lrPPenAOAXTWmEATaVUoFFK32zX74rOcT4JzziM/g5MnqCfwIoh37xO/4Po69h2/h/8loJZkZyu4Pu4KMDLAyAAjA4wMMDLAyJ9k8DFfgPN3WTBSrBw4bha9MIvGGyyGz0Hgd5FKh2XBAFlknAYsFt+H86Mgz9WQu0ydJCumc9k9tPLoeDm0w+FQ5XLZGCTesD3vQYMKkLopTRiAu7s71Wo1040/ffpUg8HAuhoBurAwaKNdmQaH0jXOcrmswWCgxWKhfr+vQqFgHWRgLrlnDAwjZ00ymYySyaRev35thz2VSsn3fTWbTTWbTT158sQACW0u6+l5nrEEFDifnJwcMWi5XM4KgzlMMBiuZpaLe+PAwwDTbpYUMSDhpvtdRmS9Xltb0Wg0as51Pp9LOhSpTiYT6woUjUatZSz3T1pakgEfUg0Ku3GgiUTCWrjCTmPH6IolHTEvsGBuhyGYYNZ2NpvpzZs3xlgjJXJT9G7Rd6FQsMGfnBMGEiLZ4buRHK1Wq6NiZOlQ6M1zz+dzlctlawlN0XqlUlE0GrVaDhhvAqVYLGZDQ10Hyx6yppxhnj2ZTB6dSX6Os4sP4XyEw2E1m00LBr7++muVSiU9evRIo9FIzWZTk8nEAjR36CK+iyCV4A1gms/nFtxR3zKfz234I7NNAEP8lytlkXTkEzebjbVSdiVnPBv7JOkouIaZxY8Cwqwt3+VKHVzgcJlosgvcI4G/C/zB9fFXgJEBRgYYGWBkgJGfNkb+JC9cFPTxALBPbucP3vwlHTlr9L44OTaRz7CAPISrSXc3hHQg6UgO9GQyOUq5wzxwiGG11uu1ZrOZOTS0nBRhAhLcJ0WfvEnjEDmUGOZ0OjXn7epqSbujfcWoYX8mk4k5fO7ZLYblTX82m2mz2ZhBwwx1Oh3T+CaTSeuU5BoDz8XBg1FMp9PqdDparVZWIAwrut/vdX19rUQiocvLS9PQ03aXWgWYRGQzy+XS2CA69ODYYJVwtC4z6
rJXMBNuMWo4HDYgYP1crfFutzNAD4VC1s6Wz0ejh45UdCmC0YNxxpnB3roXBw7Wy5XkALQMsCwUChoOhwZ2zL7BKcOy4FD6/b7VNlBMy7BS7rvRaOjLL7+0AAVnPZlMTCYkHZwENRQ4jFwuZ+cTux2NRioWi+Y0uR8KeLF5iuhxtgSMdAPCOfu+b8XjdDGr1+ummXclSPv9Qe4EQ4nTJVhstVp2BrBdN4ibz+fGfKLz7na7BoqJRMICBwrjy+WyJpOJnTdY2VwuZ4NXy+WyJKlYLMrzPOt+NhgMjth5mFf8IfIk/A7BGs/kyhWwTXxQJpMxlnS3e2jHzjllT/FRBMywbrCx7D8Buft59965R4JXNzAkaMVv8Jng+rgrwMgAIwOMDDAywMg/fYz80PWTvHBxqBaLhd0MjoW3Sg4vaWUMA0fpMizz+dwehodwD9tqtbLfzc+4KVz3rds9wLAxgAq6WkmW6t9ut0fyAd6i0Qev12vrNBMOh8158GbsMmKxWMyMGdaQz6NRxUntdjvTuVOwjMGx6dwf7Xpx2q6OlIPkaqXb7bZms5mleJESoL33PM9a1nL1ej2dnJwYA4Sxp9NpPXnyxFq24jRdaQKMWjQaNQZmtVppMBjYjI53797p/Pxc2+3WDoMkCyhgsEgLsx+bzcOQOiQzu93OCn8B2mg0agWb8XhcqVTK6gawRTfFTADBXBeYuFevXqlarZozAdzc9D9peeQaOAc3SKX42/M8G7QpyTTMi8VC5XJZy+VS19fXVlvB74TNxQkgddhuD3UR/X7fNOKTyUTZbPYo1Y59A74U2NI1zWXYsEv33mBa6/W6fQ/rsN1uzQliQ2jAcf4wqrlcTpvNRo1Gw8CW+hTXsQPgMEYAFQEFQRJyHwY3MlQVeUgsFtPJyYl8/zDIMRKJaDqd2rwbHC6yHFiqZrOp1WqlcrlswMdaFotF2wN08hTDc5aQ5xBEs6YEkNguPiWdTltQzTrQwADb5KziXwjI3z9r7jmG7cY++SzBN74a23JZQZe9w5dgt8H1cVeAkQFGBhgZYGSAkX/6GPmh6yd54XpfJ+mmK9HT8vaOI+NN2T0w2+3W2lrC0o1GI9No46AxAPf383kcKgPi2GjeWgEkFsh9W+We4vG4OQ3+LBaHGRg4s/cX32UbYRAw+slkYoWHOJpcLmesUrPZ1LNnz+T7vqWYuT+X8cT4SNfSOQjQgQ3t9/vmFEgto3/e7XYqlUomTcFJAECNRkP1et0AhDbA4fBhmF82m7W/6/f7xmROJpOj9D6Hn7UKhUJqNpvGzsAcsi6z2UzFYtEYKDphEQDgkAADtMkEJhQJwyZJssOXSCSUSqV0fX1tUgnkBaxbPB435hWWLhqN2nT0QqGgUqlkLX1h0JDCdLtd+335fN5az242G+v0xWewMxhY1ow9q1Qqpud3tfeAXigUMlb77du3+vLLL03njI7clRRJB0DL5/MqFArqdDoGSjie7XarfD5vf88e8V20YnVZehwh7B1/T2ABAwkDPJlMdHt7a9203JQ9ZxjmFEZ2Op0qmUyqWCxqPp+r2+3aPBj2gWJfWDmXBT8/Pzet+WKxUK/XUy6X05s3bxSLxWx6PYHFcrlUv9/XbrezoESSAToXIIjEg4wDHcEAJvwAAQzZAVrw4pdYP0kmtcEvYq8E3QAT5wp/iX9jT5E5wZSzt57nmV9hvWFDJdnv4ueRkOA3g+vjrwAjA4wMMDLAyAAj//Qxknv8vTjwxwLGH7pwNLzx8f9x7KRNXS26q7Fmg1hU3jRdZ86DuW+9fAbjJgXN/4dR2u12tpE4JwDJlSAAPGwCn+HtnIMM+7Tb7UxyQdoSp86mcH84fooNKYalCwxMEaAD6wcL6TotHN5+v7cCWwBls9lYahZGSZK1UsVJ9/t9kySkUikbPPjmzRt5nqezszNjdRh45zoRjPnq6soYmkajYR2YYrGY8vm8dbDBUcEg1Wo1RaNRDQYDnZycmCYXm3EZK+wGpx8Ohw1sp9OpASPFpzhrbCcSiVhLWb6HA5ROp9Xv901fzoHO5XImX/niiy8M7GB6CSSQl6AfZh9gd1grJCSe5xkAA8hIaNgrmOlwOGxtjpl3EY8fhv65zPJqtVK32zXJDRIFnhPnBRgRDFGYjd0g6eGeKPKlpiKVShljiyPiDLx580Z/8Rd/oeFwqMlkomq1qnK5rPv7e4XDYRu+CSOHg43FYqbnJvCgkJ86Aeoy5vO5dYpaLpfKZDKaTqcaDAbKZrMWQFJIzPkajUZ69eqVHj9+fKSB59zDWsP+j0Yjuz9YTXxIu902ENtsNlb8izSMbk84bhhc/AXrjnOez+eq1+tqt9u2/u12W77v20DRzWajUqmk8XhsgRTzX7hn5EiuBMMNImD5ORNkO/Aj7ksU7CW+Gp/M+QNoguuPvwKMDDAywMgAIwOM/DQw8kPSwp+kS6Fr3LAK3Axvnzgi3hzRb/P2SYElrJ/78Gw2zobf56bQJZnDC4VCJg8gXcihcZ0CxY2wgbwdz2Yze1tG/8oBx/mGQiGNRiPtdgdN7XQ6VaVSMcPjDR3WkQPCpnAPklQoFLRcLo9S4sgl3G5WLjMJuME6YoxIQ2AWEomEpaljsUOnKEnqdDrGYGUyGe33e52cnNjh8bzjIYswY64UBCOMRg+FtbCeaMwxbECuUqnYerKng8HAZi3wfLAEODaXGZIOjpduRrvdTsViUZPJxLpPwe6EQiENBgPd3t6qVqvZWnqeZ1IM9pH9ns/nBnaAIGypy3ZxqOlSxfdFo1F1Oh3d3NyYpn4ymZiTQ6KSTqc1nU7t98xmM81mM2O5F4uFarWa2TdBFc4jm82q0WiY0wEcYOBgpGHMW62W2RhdoMLhsNVvFAoF06T3+/2jmgzkLy5AspYA3Gw2MzBvNBr6xS9+YQzeer0+YqtgpzjbBJuAX6PR0MnJiUKhkJ3dRCKhUqlk2n0CRpfl7vf76vf7Oj09NSnDcDjUixcvbJgrNottdrtdhcNhnZ2dSXpgfGGu8RMuk8W6Z7NZVSoVRSIR9Xo9dbtdNZtN5fN5A+Vv5W4AACAASURBVAv2QJINh4Rt2+12xq4RPLpF8Z7nqVar2bMtl0sLigjG8afuCxEZBWodOFsUjBNI4yNcRs5lBNkbvpd7D66PuwKMDDAywMgAIwOM/DQw8kMvXD/J4GM3BQz7gpNbr9fWjtUtiqNwlQd1U6oUt5JyhMUB7F0nFQ6HzVmhY6Ugz01jusP2SO0j7UgkEsbKsdC+f5hzEI/HdXl5aawCRsBiowePRCJqNpva7/e2+OFw2AoVi8WisYQ8x2q1Mh3yycmJORwYqvF4bIWqq9Wh4xAGzsFl/Slk9X1flUpFm82h0Hi3O+i8YWMAvvl8rqurK02nU5XLZWWzWZ2cnFhRI2CQz+cNbAqFggF6sVjUs2fPjIUpl8tH804A0nK5bKzLxcWFdrudda/KZDImjxiNRpJka+cCXbFYtJS923lnNpup2WxquVxa8TEHA/CZTqfWqlSSsXSsyXa7NWDv9XpqNBpH+t3RaGQpeN8/
7jZFgTUMTj6fV71el+/7KhaLKhQKdgAZzsj9u8wZoEKHrWQyqVarZUGL53nKZrP2/aVSyViofD5vBbs4V5g25BgEVeVy2QItNNgwSQRG4/HY2DzP8zSbzRSPx4+KTmFO6VhWLpeNOUskEhoMBjbvhP2AhYMBoyCZ76LOo9Vq2VmnviSdTiuXy6lQKBizjX0Ui0V7xlKpZF2QpANrfXFxoUKhoH6/b4wf9kyW4Ne//rWurq7MzwyHQ+12OwsW3LqGzWZjgRl2PRwOjSE+OTmxOpl6va5isWiOGMc/m81M2gFLDTONHIkWwLFYTK1Wy4LyVCpl6z+fzzWfz41V49xMJhO12231+337uclkYuwqAQ3BBUHOYrEwG8O/Yn/4gz9Gpx5cx1eAkQFGBhgZYGSAkZ8GRrqZr/evn6SGC8aEB4IZ4It5U0T3itHDeLCAi8XCOrbwO2DmmBjO2ykL4AIKbA6OA8e83+/V6XTMWZNydnW/OJ/39cPT6VSPHj1SKBQyFqNYLBpTRgqWFLvL2LGRaHlJO6IHpwiQdqj5fF7RaNQKRnHOaPh5m8ZofN+3ieie56nT6ZhBuhIRDlEul1M+n7diYlrhzudzc3QUxMbjcdNXoxVnfabTqW5vb3VycmKyChiFYrGo6XRqDoi0LcHaer1WPp+3/eJZJ5OJksmkDc+DOUomk6YFhoEAxFkXWrlKsqDFdRj5fF6dTseYWxjB7Xar6+tr6wpUKBSMXWJfSMdLB8kOBdxMdPe8Qz1DMpm0AImgCNau3+8bU4VeHbDC0XFu9vv9fwuGYLdpn4v0hU5JFxcXxmgTCJDWv7u7kyRzuq1Wy34W+QJynrOzMw0GA2Mo0dXf3t5aip5gybXlTCZjEiHP83R6eipJBjzIJQjWCEr4bwIjgsLT01OFw2H9x3/8hx49emTnDBYcUCGo2Gw2KhaL5gcYoMk6z+dzsx+kBQQVhULBQJ/7JSvA8EwkDgxxLZfLNhNotzsMPmWgKLbKcFlscrfbWac6/mBrsJQEildXVyoUCha4ILNwGUQCBTpjuY4etlB6AHH8G+cNmdv77B0XjD3rRjCFbw2uj7sCjAwwMsDIACNZowAj/3Qx8kPXj85wwWThxNlwNoaiOsCBvwckksmkMpmMab2Xy6UdTIwedgEWiw5Ew+HQ0rHumyigJckMgIVxaxBgCXG8OD+KYXljhZ2Lx+PGlkwmE41GIxug5/u+qtWqpY9JIa9WK41GI2uDGo1GzUHCXkoHDShOcTKZ2FrCUrnFyxS4Ij2A6QDU5/PDTAuM7uzszNL+lUrFZkIgO5AOwHt/f2+AxJp3u111Oh1jfXz/ULTM2rMuOHPYqvF4bGwAzwSzGIlE7N+n06kKhYLK5bKxvAA6TCPMCDUEMHUU4DYaDUvXczAAPtL6sKQEOUgK0DJLsnVPp9Mm36GY0l1j2DDWF8fImrlgBUvIocf5JpNJLRYLjcdjK0am4JaU+fX1tX2egIciXLTfyCckmV0gKcDGW62WPM9TqVSy4nBsk8AHx7fZbMyJElRg88gcYKlKpZJSqZTi8bi1iE2lUhqPx3r58qWxt0hRarWazs7O7EwxR4WzkkwmVa1WFQ4ftOCwye12W5lMRicnJ8asYZeuLht7Pj8/VzgcNvvc7/fWSanX61nrYHxSMpk0AEMjj44exhJNuiuT4kw+f/7cdO4EyRTgw9wRBPG5ZDKpwWBgxey9Xk+dTsfACD+43W6VSqWsVoFAFNvHl2K76/XaHD/BDxIXwApdO8G/JMtMEJwTsHPvBOhBDdfHXwFGBhgZYGSAkQFGfhoY+SFJYfirr776eARxrl/96ldf8TaOjpQNwDjRaboMVCwWswnsbBZG7Gov+T0syGQyMUfB4lFA56YMYVDQekqyA49DRuPKwtJGV3qYLA0LxJs698Iz0tYSY2b+gltgTvtNmB3YCPc5R6ORksmkGQkFwjCb6NHp8ILRw56t12tzmBg7z0o9AD/raqw5IAQBnuep2+0aoDPQkMMOI0gRZDweNykGzhkJB8P4cDBo2F32ElClgxCyF/ewSg8DNylWBXB837dC41KppFwuZ2wLeupQKGRrix0Aoslk0mZrlEolSdLr16+Vz+fNASCjclsxs/ewPfye+/t7xeNxffvtt/r5z39uQBUKhcwOarWaHfThcGjMKOyS7x+KepljARuHXSAzCYVC1rGM74AFGw6HisfjFlw0Gg3rDkbw1263VavVNBwOlcvlzG45FxS604oZ5hypwHA4tPOHRt/9k8/nTW5BcSt1D+zlYDBQLBZTpVKRdGAZf/Ob35jUgMGV1WpVkoxdzOVyajabdq6Qa/B8/X5fjUbDtP7tdlu5XE53d3emzydAqlQqajQatobr9druezQaGdO/3+9NioReHLtrt9smSWG+D3aSTqcViUQMPMlCUMSN/cTjcc1mMw0GAxuW2Ww2LRjjXnzfN2kNPg8GED/lZg2o5eHnJVkQGolEjFWWHgrT+QwAwzn8r5qS+6+++ur//ijg+ISuACMDjAwwMsDIACM/DYwcj8f6h3/4h//z+7DgR79w/eM//uNXvA1zc24RLA8fDoftf9nI/X5vrWlhG3DMkuxN1O1ugzOA2SOV6EoLOJjICKLRw4A+5A84B9KMMBm8FaNXhuXCwd3f30uSarWaddAJhULG1vGsaGiRQOCEN5tDoSwaU34W5gQWYjQa6d27d8rlcqpWq8Y2VSoVm+8gPcxtYWI3bAF6anSwvJFT/IghwhJEo1EVi0VrC4rjJiWOE8U4KUrebrfWsaZUKhn7CXMKiwsLCWOG0dOVR5KxvIA7YIs8Ybvd2nBDnAX7gmQEgMJpcm02G2MAcUYwgaSMI5GIgQlOgsPHenHwJFnxcTKZNIdOMWqj0TCbhaU7OzuzLlFMtMfpr9drlUolFYtFSTJbBeTZz3q9bin5UqlkBaNugObqiJHxJBIJ3d3d6d/+7d9ULBaVTqeNWfV93/Ywl8sZa8M5Wy6X6vV6BqquLIWiYRwTUhUCDhhw3/fV7/d1dXVlHYqQ6+z3Dy2Hy+WygR5BWjabtbOEPCSfz+vs7OxoUCl1DbDgaP6z2ax++OEH9Xo9PX361M4JbYlh6viOm5sbbbdb5XI5C+KQYMD4UTTc7XZtfd++fat8Pn+0ZwSL+DwkXtwnrC3Fx+122+Q0sVhM9/f3VlTebDaPgknsm+5fbtADYGHD7r4hg+K+CI4Aevy429UJyQR2FbxwfdwVYGSAkQFGBhgZYOSngZHT6fR/74XrV7/61Vc4BBcweOPD8QMu/B2FZrvdTq1WS/l83nTVvOFTsMcDUmiHwycVyhs6xg3QYCC5XE6z2UySrPvParUyFoeFT6fT1hkHZ7harczgJFkqm1auMBE8E9/HBiBb4JDRohV5iKuxpgB0v9/bPAgmgxcKBdMnAy7uIEoKqjebjTE78fhhmCF6WDTdHBIkAXSEgnEAAGEQK5WKfQ4WLhQK2QwJ3/etEBsGCUAnwIBpzefzajaburu705//+Z+bvh8wxo5gXUmBs9cEBuFwWI8ePdLXX3+
tXq+nZ8+e2VrAbsA4YgsuwMCmFItFLZdL3dzcqFQq2XBImLNisWiyhnK5fAT80oHRg1mSZFrni4sLvXz50hw9bNZ+fyiYplAZhni9Xms6ndp+I0mpVqt6/PixSRw879A2t1armRPZ7XbWSYnfDYvH9+52O93f31tR9Xw+tz2WZOxZo9EwaQdyCWQfgDf2BcuFXdFhazgcKpvNWp3Hdrs19jkej6vRaJh+fDab2bmbz+e6ublRKpXSv//7v5t0iYGV7Ek4HLZ6iLdv31oxfCwWM2DA7+RyOX3//fd68eKFfN9XLpfTZ599ZkEBgF6r1f4bU+dKXq6uriyQe78QlzXEB+VyObNhAJ7vo30vznu/36vX65nT5xyPRiNjNm9vbw2QCLYJGvBBBImuxj6ZTFpADICxbpw17Nj9Ge6LM+iC0n/ZX/DC9RFXgJEBRgYYGWBkgJGfBkaOx2P9/d///f/eC1ehUNB+vz9y1EgPeHg04hgiDpY3/1gsZsPUMCLpQb8Os1YoFI7SjxwcnCeH3dUPu2lCDh+Mned5ll4HKNwhaK6UAo05LBbsFOwRAImDpJiW1KXv+zo/P5d0YH82m43K5bJisZg6nY4VyXIvrCXPMBgMbA1JY6IXp1AUowMsYHXYD9LhkkzyMB6P5fu+SUgYdNfr9TSZTJRIJEyOApMEcBcKBYVCoSPWlALfSORQsJnNZlUoFKzol9as7DFOCqYABpj9wOABPX5uuVxaJyAKK3HGTCJ32SiYmGw2q3g8rn6/b3KA0WikRqOhWq1mmvXNZmPtc2EECYAogO90OjZwcLFYKJFI6NWrV1bkend3Z3IIBiPm83mlUimTinBOYrGY6fZhql69eqXhcKjz83P1ej0tl0tjjSKRiO7u7lQoFHR3d2fp9n6/bzIkCodhNgEN5Dmc1ZOTE5sxw/oidaEw2PM8k5FMp1ML8EajkarVqhKJhDqdjgVo3W7X2CykF7SKxXFybwBbLBbTzc2NqtWqXr58qVwupydPnlgRO2DCfrZaLevQRJCEH0J2QhA3HA715s0bVavVoxbDyDlms5lyuZwk2dmhKJ7OVzha9osAkKLxVCplbBiF4Jw95tZEo4cW0ewN0pXt9lA/0ul0dH9/r5///Of63e9+p9vbWxWLRZP7SLL9kR4KwGHk8IcUtBPUEbC5LDo+DL/F3rPGLhMvHQK9Xq8XvHB9xBVgZICRAUYGGBlg5KeBkR964fpJ5nBJOkrlkY4mFUfHFlgcDiSMAxvE/yftNxwOdXJyYo6TxYtEItaycb/fm86d9CgOlO+CzWETYOdgdTgYkUjkKOXsOjnaivLfOK1ut6vZbGYSCNLjdFgB7ChIHI/HSqfTxhDRdQpQcosQpUNBKiyWC2IYM92R3JkUFHoCfhgYb/8wqKlUysCNg4ocYTAYaDQa2V7gfN09LRaLikQidg+AM0WvAFw6nVaj0bDhg8+fP1cymdSrV6/s90ejUUvhs958z2KxMP0xgEohNnUNDKrcbrc2UBKJCIwIen5Jxr4AFNQFML8CKQzs0maz0c3NjZ4+fWp6bxxru922/z8ajaz4Gjsol8vGqkSjUUtZS7L7gd0BvCg6ns1muru7Uy6X03K51LNnz/T69Wu1Wi1dXl6aRh4pEEGLW6CLTIaaA+4jkUio2WxqtVoZkHBOU6mUtel15Sv39/dKp9MmY/B93zqUNZtNC3RCoZDJeFqtltn7+4XUzObpdrtW6xCNRtXtdvX8+fOjPQbUsMFMJqNHjx5pPp/r9vZW9XrdQJSgAf3/er22M3x9fa0XL17YmafjVTweV7PZtCJoAgjfPxSyZzIZ3d/fG1tJ693T01NNJhPl83lrv0wb5+XyMICS+6KZAB2tkFsQ+IxGI00mE6sbGAwGKhaLVszLPXPeWG8CEH7G930r4I/FYur1epIeJFYE58hakAUhcYG5w99I+oPdmoLrw1eAkQFGBhgZYGSAkZ8GRn7o+knawrPg7gOg/XVT567WGUfFTZOqY1HctrGkgGOxmKbTqaUF4/G4FYayUBRx0nISIOEN3E2p0z1GeiiQ420bZ42Wk3tCniE9MCAYOJIEd6jgbDazDkOdTsfAZLVaWUqZgsztdmuFqovFwgYw0ikK8Oj1epbeBGDD4cOAvnK5rNFopFgsZh2iWCPSvIB2q9Wygw9TBVPm+77q9bp1SII1xAhJPaPXhVVaLA7TyykQLRaLGo1GBlTffvut/vZv/1aS1O12jckDgGKxmDGA0sHJTiYTm/1xdnZmKXdmMTx58sTYoOFwqE6nc5Q+3+12yufzkqR2u23zHWBwY7GY6vW6ut2uJKlUKtkaF4tF64xDVx7P84xdBPhZ481mY07o+vramMXHjx9boNXv9zWZTKxNMrbmpsx/+OEHc2I4WQ7+3/zN36jdbqvT6ahSqajX65lkZTAYmCzj3bt3KpVKqlarSqfTevfunSqVip4+fapsNmtzN5bLpe7v71Wr1dTv942VzeVyVreBw6GLE88IAwQzjJ0DjgR4tFuez+dWM7Hb7XR2dqblcmlOf7/f288S9HQ6HcVisaPCVEkWjE4mEysOPj8/t/bZ6/XaZrlIB+A+OTlRrVYzdpROSfwuOiDd3t7q4uLCQJNzCJuN36jVasZSwsATEPIMaNIBNQInMhBkL2ALkUatVitVq1XV63X7ftg2WEq34JyAG3YR6RLZAwbFIongItNA1oEXBGyRIBqWMLg+/gowMsDIACMDjAww8k8fIz90/SQZLg7Dcrm0VDxvgZLsEGEc6K9hHNzNRmPreZ45bfetEWByHwx2y33rJL3KIccRkqIl9c6hwPkiTeA5eHsOh8M6PT21lORwONTp6akxI8vl0vT1+/3eOgXh4EmPcv+e55kzXq1WpvWu1+u6u7szVg8WFOkELJ/v+wa0/yXzUbvdNq307e2trXe73ba3eJ4Pw6YzDql7N7XNYEIOEQXJdL1Zr9fqdrvGFMKWUmjp+76ur681n891fn6ubDar29tbdbtdpdNpdTodC0Jg3kjxf//999rvD206y+WyaawxfLTQyE6oDSBAATyoTWAfuf9kMqnHjx+r1Wrpm2++Ub1eN6YKTTdMIYcPZ0hKHK0vaXcc3nK5tNkf3333nWn4YW3evHkj3/ePNOQEO7Bm4fBh8CFAdnl5qcViYWubTCZtTQaDgZ2FcrlsILjf7/X27VsrvmWwKMwisgSYc2obOAsw8tg1RdUEVQCcK6Vx2WlaNcOkYq+0ysWG7u/vLaXPWULCUCwWrZ7hr/7qr6zuoFgsqtFoGEPprg/toCnspvCWs3t/f28sN1p8NwsQi8VMEnRzc2M+xfd9nZ2dqdfr2RBIBmWiKcdp032Kgnj2N5vNWjbDlVAADnzvbDYz+dJqdWh37BbyI+dyi4Pdz9NtTZIFONQc4F9hqJHIcJ8wjdSUAODYbHB93BVgZICRAUYGGBlg5KeBkR9SgfzoGq5/+qd/+qpcLtvbHywoNwsjBpPGTWPQsGPSgQV8H3RgcyRZcXA4HDZtMxuJpplFIx0ejUatowlvvS7rx3cj34
BtCIVCVoDIMEi00XzGbRHKWzMpSMBkuz0MUqtUKvI8z4xEkrEh9/f31qVHOshE9vv9UUEt8gwXWNGw872s92Kx0H6/N+YLphRQYsBfPP4wdBGHEY1GbRglxcSwnbzBN5tNYzb7/b6tJeuJ3MAdbIdcAsABKE9PTw34ttutsbtIL0qlklqtlkkkkC6s14dhg5KsYxbdpGAfEomEzUIBACjsdRlQ7gt2uFarmcYZ5hbtOEwt9RY4f0mmx18sFmo0Gjo5ObEDCygTiDx69EjhcNhsRDoELsvl0uQuyEZg4er1urVmpQie4AV5SjKZtLoPZEXYT7vdNkkTBcgEFkxsJ+Ag4EFShKZ5PB6bvAk2iZoPwDSTyej6+lqxWEzZbFbr9dq04wQZ/L52u21nH602ZwR5QyqVsufDP8BEYePMQWH/V6uVOVrmz0gyGcp0OrXPUHTO8/MsFIPDJANE6OhpWkA2wQVtAt73td+JRMKCQRw5ALrdblUsFg2caTWNY8ce3NoLZE7cH3uPJI1AHhDBZ7rsv8v88e98Hy8L/P//koMENVwfcQUYGWBkgJEBRgYY+Wlg5HA4/INdCn8SSSFObLs9tHQkVcq/4cRwFjhKVy7BggEky+XyaAMoQiQNyP8iS8BpbTYb07jCFiQSCU0mE9ORs0Hvp7rdzjO73c4mf8/nh8nZODHPO3TIoYgxHo+rWCzq7u5Og8HAWDgYNle7jqaXzw6HQzvUksxxshYAH3NDNpvDLADS+KyXJFtPiiDRpabTaQNW9oRUOGuH0WBc9Xpdi8XCGBzuiaGP0uFt/uLiwn4Pf7darSyFXCqVrFj65ubG7psWu6xHq9WyAuRcLmcpY5gF2g0TUKTTaWOTcPQwjBwwGBaAmDaeq9VK6/Xa0vgU1H722WcaDAbW8YfAgLoC2A0YZ+wnnU6r1WopFDp0pUIb3Wg0TJaA7AAmkMDALZBfrw8zN9D0t1otC9C+/fZbPX78WBcXF1oul/r+++9t0GEkErGOZOiJN5uNBQSk5DebjX7729/qs88+UyKRUKFQMOaKMwfDyH9jt9ScwAoi83CBFs37bHaYbE/Bcb1eNyYdZng6nVogSaEqjtvzPHU6HZ2cnOj169dKp9PGmgGY0WhUhUJBrVbL2r5SIF6pVMzZApyj0UjZbNYKyPP5vIrFolqtltl/LpfTeDzWeDw2xr5arapYLKrX6x1JtaLRw5DKwWCgQqEg3/f15s0blctlbbeHFs+sGRr7drtt4ELQBIjil3zfN4kGLOlyubSAiSCBoBpfy7mA3QaksAlsDIaWoJTPE4BJOpJghcOHltAAFP8WXB93BRgZYGSAkQFGBhj5p4+R7s+9f/3oFy7eHHnj44Dxx71h3pRd/aQkSx0ieYAl4uFhgzgUvPkCALwdwyCiD+UAo5V3WUR0mxgGU94lWatcmMj5fK5ut2uHB0OAEaH7zn6/tw1g7gmHiDWgaBBGjHsolUoKhUIaDofWEYruL/wMz4BGGAeEc+r3+xqPx6rX62aA+/1e9XrdWDZaYPK2joOEjaAjECwAunVYG1LErCFa/uFwqGazaYECaWdYokgkoqdPnyoaPXQlggVjaGS73TYZg+cdCq0LhYLG47E6nY4VLv/ud79TNBrVF198YUC2Wh2mo282G3W7XV1cXGiz2eju7k7n5+eqVCq2R7TrJXDAqcNqJZNJdbtdFYtFA5HRaGQpeOkAmOiHWef5fK7RaKRaraZ2u61UKmVSGQqRsVWXnXTtslKpaDqd6vb21rpEzWYzvXjxwtYIPflgMNDz58+PbB+tNg6F87DfH+bWMKOEZ0DSwFmKxWJWqOp5nhXO03WL551OpxbgwaZhfzjOy8tLqy+B/SEQgTGcz+cGKDDmgBu66HQ6bcXeML5IXjzPs6CE4KLdbtv9LhYLC8L4TqQgr1+/VjabVTqd1v39vUlwWA/08wCC5x26el1cXGg2m6nVatnZLpfLmkwmKpVKdm7YE3xCKpWy1raADYw0djcajawWZbvdmmQF5nmxWNiZctlySQZSyF5gIQmOyUx4nmcBKZkPnhWf7Nol8gmkPYGk8OOvACMDjAwwMsDIACM/DYzE3/++6yd54QI0eEPkv0kXwoR53kN3JdKdFJvh+Pg8b4nuApL+JIXPwWCBYG14u3VByZVIcNjQfLusAW/8m81GuVzOWlkyUBAAIdUKMwRQAEAAJxKN92c68Nxow6PRqE1gl2Qpc+6RjY1EImYUGKIr1ZjP57q7u7M2wfxbtVq19Dbryvqg4V2vD11qKNqE0XDf3Fl79pd9QtNKETOHH108Tpt947kvLy/l+74VUw4GgyO7ubq60nw+N73vcDjU5eWlaYxxSo1GQ5eXl+YsKLKGqeN3AprVavXI9nCspVLJUuAEJewX6XCXRQUISPOfn5+bQ0arjCPi+XHMHHIOKQXG5XJZb9680eXlpTnrXC6nRqNh6/3FF19YXQAsD2woYMXzlstlzeeHYZL5fN4cOp+jUB5nxHliPgv2QdchfhaQQGrS7XaNjavX6xqPx3YfBBzYNA6RInr2Amc2n8+tfW40GlWv11M8HrfhoTB2pVLJOl4RnNAFDfCj2LrX65n0YrPZ6Pr6WpVKxdYCmQaSC7cIG+YfuU4+nzeb4UUkEomYtAL5D/eFJCOdThsIcA6Gw6EFyAAq/+bKf1x/BgjAEnLOQ6GQBa4E3i5z7zJ4fMbNiLzv1wkWCTjczExw/XFXgJEBRkoBRgYYGWDkp4CRnIffd/0kTTNYOG6eG2SBVqvDQEQOcigUMscuyViAzWZjhWz5fN7efklxu4sBqAA+6/Vhujjfi1PG6JFfAEgU5bFoTBCHoaITFE4IdoUNZwBfoVAwQ2dzYKwoAqVlKwMEOaz8LIwBxgmLxRrhZPidrDWH2X3DTiQSxhJIh4LYdrutk5MTk37APOHkWUvkJTghtxMVDkSS3TuddHDYtDhF30xKG0fr+76KxaIVIVPwGg6HdX5+bo6NFrowrM+fP1c0GtXbt2+VTCZ1dnam8XhsbVoTiYQeP35s+v5Wq6XpdGpOtN1u6/T01OZr7Pd7K0idTqf2PIDZer02bTf1DACB7/umd2dNYK03m42azaYxSuy3y1Qzz8U9wIA1NQYnJye2NsVi0aQ5Nzc3KpfLSqVSNhfF1ZID3gQI6MEXi4Xu7+81n8/N+SBvOD09NQkIewTLRWDGOSaYAMQl2b1LMl01QIsdh0IhTSYTkyBMJpMjZgmNO0AFcwTLtdvtlM1mraMYunAAmlbDo9HI/AQSDAKAq6sra7l7enqqXC6n4XCoWCxmRdHdbteeGTkMYDefz81+KHSGCTCPugAAIABJREFUvet0Otb1arfb6fb2VrVazUASpjEajR4FwgQU4XBYnU7HBoISJAN6aMxZe8AYP0AQiB24UgqAGEkVGnbX9jgTLrDwx2X18IfB9XFXgJEBRgYYGWCkFGDkp4CR/6svXDgNt/UiN+em83BIGMrRTfi+FTfCkiSTSXt7Zh4DB4XDj6NHo82h3+/3xrSx4CyMJHsjT6VSpqlnDgASA
UnWOYbnwgkXi0UzBgowYdm4p/1+r+l0al2GKAgmBcra7HY7+7tisWiD3lKplMbjsd2b7/vGFJBG5nlh+nDqiUTCugVxEHu9ns7OzqzNK44QgI/FYhoOh8a4wDROp1NjrChqxYF0u107eBhysVhULBZTo9HQer3Ws2fPjFGAzeDwRqNRkzcw7+Kzzz7Tzc2Naf5Ho5EuLi6s5TESisePH1v3pcePH6tQKNj9ojvPZDLGxtIhy2WIpYPD45k5aK7GmzQ4NuWyJ9vt1iQCBDDj8dhaBC+XS/V6PWNxsU3OBw4G6Q3MDYXNDEmkqxI1GqVSybTjnndoXey2H6blrBuQkO6GWQ6HDx2VKBAnfU+wh8Nj/d6XPtEliX3Hac/nc5uHEYvFjmbVoG0noGNN+A78CRIYtNEwetJDW+vpdGrsXDgcttbCsJNIDJbLQyvmWCym8/Nzk0/haPFbBBCw7tyf62vo4uU2KeB3EIghZ0HigfzH930LtlzWHD+BtARGXpIBmuvMqclw5WgAErYFmLCW2DxZCQANm2CNeX7OB3aAXbhMYnD98VeAkQFGBhgZYGSAkZ8GRn7o+tEvXC5z9z5Y8Obpvg26gM1Gug/vbuJisThK8XNoeXgWMpPJ2JT2xWJhTAoHH4YKB0ZXF1pusrmSjEWEhej1eqbD5j5hG6WHFCZpZt9/aLtLup2N8TxPw+HQ7h2NMPciyQqmKeLl/mEjAbf9fm8OCNYUdoDDgsyiVCqp2+1qPp8bIwb4IGeYzWbGhsAGAK5MbHdZG2QVyFlIp+92OyskjsUOcxPookR69+Tk5Gj6OOwEYMFhzWQyFlR4nmdDK/v9vgaDgc7Pz63Y9u3bt8ZS5PN5nZ6eGoC59kRQs9/vrciT9YfRlGTPzWHE5gAaFxSY4UALZYpECSQYvOimoNlvDjySIoDi66+/PmpTGwqFrM1ruVy250AaEgqFzJECaEhJKpWKseIU6VIb0W63jTHCzmF00Liv12ubTeNKkgCB/X5vbBFs8nK5NIlOq9U66hjlyjlch7fZbFQqlcyxcw/YlyuFYq8I/nDW2WxWi8VC/X5f+XxeiUTCgi9kDuPx2BhqmGXa3XqeZ4GLq99nH2Eb9/uD/IOgDpZ7u91ap6zdbmcSC4KGfr9vRcvD4dAK58kauHIXntG1WwAWv0OdBwAA07rb7Uy+wTPyefTorg8H3LgPAg/W2/UnwfVxV4CRAUYGGBlgZICRnwZGup97//rD1V0fcbk6Xm6I/34/5cq/ccij0ai1miRNCPPR7/fNEaMhxfmhrXV10aTvYQd4E6VNJI4Ap0AxIildjJo3ZYpHkT/0+31raRoOh43xwyjZEN6Ot9utGRsbUiqVjLnD6fL9ODEABPDi9/GGjUF5nnc0GwTQYTAk98abOsxQOBy2egAcIQ4daQqSAobcrVYrY8s4GDwH9wrDWavV9Pnnn+v09FTh8KEAO5FI2NBGDuN0OjXG0vM81et1JRIJnZycWFebcDhs90hAwNDFxWJhDBvMWiaT0Xq9toGRPBcOiNQ3nwXgAFpJJrtJJpPWtYcDyPcRCLEf/D5+nlkXblocrXo4HD5iaDzvUExLa1mYsel0ahPoE4mEisWiMXrIf5DTDAYDa/MMINM6udls2mdgrfnuaDSqfr9vwUa/37eADGZqsVgcdTlyWXUCDECdPRoOh3Yv/PdsNpPneSZ5SCQSymazR+ea84lfmU6n6na7Nlw0n89rNBqp2+0ag0rmBbaSrmbSocXt6empsXbYKhkDptZTZA3Q8NyAM9p6gIHAGECElUNug9/gLLlyMiQwnU7HAly6P41GI0UiEds7/CnrStDrsoCw0/y8+xnuDXCABXX9Nv4FbT0+BOaXM4wfCK6PvwKMDDAywMgAIwOM/NPHyA+pQH6SFy4WCycoHQ9i5GcwXpg8NJ6uZhJH5RZk4qT2+4OekqI+Vz6BEyR1yAK4b8YADXKI5XJpWmjSrywYP8cGwUKMx2NNp1M7UPxhhoEka43J/Y5GI718+VKtVstkDOHwQacLK9bv9614k/uNRqNHzodn4ZDNZjPrAJXJZIxtYMNJ4fK7WEOMDAeFPhktLUwe94eWfzweaz6f20FKJpPK5XJWUAoLgN6Ye4KJWC6XNsQukUgYwxGPx83JA4CkuwFkApJwOGztSne7nU1cLxQK1uo3k8koFArZvQJ8HO7FYmH2CRhz79vt1uaSoPlFT03bZBhU1sUtwOQZm82motGoSX3QfnueZ/9Oe1zuLxQKmW3V63WVy2Xtdju1221jYejy0263FQqFzMHm83nTLSM7IlX/3XffHRXdJpNJA+Plcqlms6nRaKROp2NrRCtZnCqf55xtNhvrTLZcHjqKxeNxXV1dmSb69evX2mw2ZmOuzTFhHnYLpgrHDThiO+xXJBJRr9fT7e2tXr9+bUEawUQoFDJAGA6HymazJs/hD3ZBUJFOp81Wsd3lcqnhcGhOHJ9CF69CoWBtsAEJ5Ez8fuwXP0ORPaDN2aYWB2nJZnPoeubKRfBt8Xjcmhtgh259AyDCZzabh2GhblEwBccuG8mz8G/8DoCRnw+uj78CjAwwMsDIACMDjPzTx8gPXT9JDZerL8XwcObcPA/g6thxjhhKOp22DkXuGyuGBgND+1F3tsZgMDDHwedgGNBhbrdbjcdjO9jSQ5coDiutIVlc2u3yO0ldbrfbI50r3wVTBMDwXTgoDg4OBGdLGhvmBOdOqtNlQKUHXfVut7NC4UgkYtruarWq0Wh0xHBEIhEVCgUNBgNL9fL9MJMcgHK5bAeqUChIkhWJol2GdaV4mkOI5AInCwsUjR66TK1Wh7a5PC9sI46EtXdlFDiHSCRiM0DYf5cJ3W4PrUIpBgYksEMumDueZbVamVQAhwlDAvsBywQb69YiJBIJs5tIJKJut6vb21vlcjlzMhxudPKcE/Tn2D6sCXUNDNnE6dDNazAYqFwuWxcml6Wdz+eqVqsWpMBIxuNxC8ROT09NNsD+0sGK4ZecHUk2CBNZRDQatfkx1ASMRiPrZNRsNtVsNvX5558rk8mo0+lYMS2DDTkryWTSAjjS9PP53DTT4fBBw391daVms2lSp9FopFDoUHCMpCAej5tUBZ8EcG+3Wxv2SWvfTCaj8XgsSRYUwXAiW5rP5+ZjsDPkUdvt1p4rnU6r3W4rGo0qn8+bg8enwYBy/3RlYr+4kEIAIgTd7zN03DfrB0tIzQr+mXPK7yOoxB8TsAJM2KHnecYqY+vB9XFXgJEBRgYYGWBkgJEBRn4wwxUKhf6/UChU+NDPSLKDhybbZUq4aTSTpJX/6/cfpRI3m41tEMwdBgEj5joBFp0FIR2PfhPd6nA4NO02AIGOdjabqd/vmwOEjWHaNwWSFJfCFqzXazMipAuAEpuDw4vFYtYJiftCpypJNzc3kmSp6FAoZLNARqORsQwwn8g4+Px+f+gec319rcFgoMlkouvra3vT52fv7u7sgPBv+/1hlkkqlTLgrFarymazRzpsSTaDgfV/n60dDofqdDrqdDoGdL7v297RFQm2tFQq6fb2
[... base64-encoded image/png notebook output omitted ...]
fjvOOOMMjB8/HlarFVu2bMHAwACuuOIKXH755airq4PX68W0adPQ3Nwstsrj8SAYDMJms+GDDz7AOeecg3PPPRennHIK6uvrkUgk0NjYiOuvvx7nnHPOYb+rivvvv7/nvvvue+KoJtYJBM2RmiM1jl9ojjwwTkSOfOKJJ3DDDTfcv9+5crR+0rNnz6589NFH0rlf+QLDOGJf7AP9zsH+ffT3H+4zRnfiodp6pG071LM0NL7JYEAvq9aPjIzIpsgwDHEBKpVKsNvtqKurq9l0qc8BqgUbh4eHUSwWZYNlMplEcaqrq5Nq8+oz+PtU4XK5XM1647OKxSKGhoZQqVTgdrvFVQIAhoaGkMvlJBMS03MPDg6iUqmgrq6uZkN7OH0DVP3N0+m0BD7bbDYhELWN3EgCqPGNp6/8yMiIBEB7PB7px2w2K6qj2+2WWweOzf7aWyqVEIvFUFdXB4/Hg3K5jHw+j+HhYclY9XUOW/9/TD6uVCqzj+iXTmBojjz4vx/qWRoa32Rojjx43wAnFkfOnj0bH3300X5/YUwPXBoaGscf1PpANF40dGpgLQBRww/1HOCrWZBUA3u4z+B/+b0Her5hy11OXQAAAj1JREFUVONiRrtBjA4MPlLjyu9i4DPbMrqd/Hc1CcFosH3q+6j9Qhyoffsj79HP589G31ocCfSB68igOVJD4/iG5siD9w3bThzPHHmwA9cxqcOloaFx/EA1yPv798PN3nOw5xzKH3z05/dnBA/VztFkQWJU2/91FfXRbT+YoT7Qv+/vGfvrl8Np4/5uE77OczQ0NDQ0Dg7NkYeG5kh94NLQ0DgMHKlhPFbPOdTnDvbzA5HM0WAsDfLBCHKsnqehoaGhMfbQHPn12nO0z/o2caSuLKihoaGhoaGhoaGhoXGMcNQxXIZhxAB0jE1zNDQ0NDS+4RhfqVQif+xGfFugOVJDQ0PjhMEB+fGoD1waGhoaGhoaGhoaGhoa+4d2KdTQ0NDQ0NDQ0NDQ0DhG0AcuDQ0NDQ0NDQ0NDQ2NYwR94NLQ0NDQ0NDQ0NDQ0DhG0AcuDQ0NDQ0NDQ0NDQ2NYwR94NLQ0NDQ0NDQ0NDQ0DhG0AcuDQ0NDQ0NDQ0NDQ2NY4SjPnAZhnH6WDTkjwXDMOYZhtHwx27H14FhGKd/i9t+tmEYgT92O74uDMM441vc9+d8y/v+rG9r3wPf/v7XODJ8mzlS8+MfD99mjvw28yPw7bfR32aO/Lb3/cGg63BpaGhoaGhoaGhoaGgcI2iXQg0NDQ0NDQ0NDQ0NjWMEfeDS0NDQ0NDQ0NDQ0NA4RtAHLg0NDQ0NDQ0NDQ0NjWMEfeDS0NDQ0NDQ0NDQ0NA4RtAHLg0NDQ0NDQ0NDQ0NjWOE/weewGggFgupFgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "lMombPr0GF9a", + "colab_type": "text" + }, + "source": [ + "The images used in this demo are from the [Snapshot Serengeti dataset](http://lila.science/datasets/snapshot-serengeti), and released under the [Community Data License Agreement (permissive variant)](https://cdla.io/permissive-1-0/)." + ] + } + ] +} \ No newline at end of file diff --git a/research/object_detection/object_detection_tutorial.ipynb b/research/object_detection/colab_tutorials/object_detection_tutorial.ipynb similarity index 98% rename from research/object_detection/object_detection_tutorial.ipynb rename to research/object_detection/colab_tutorials/object_detection_tutorial.ipynb index c83b67ede..9063f2cd3 100644 --- a/research/object_detection/object_detection_tutorial.ipynb +++ b/research/object_detection/colab_tutorials/object_detection_tutorial.ipynb @@ -10,11 +10,11 @@ "# Object Detection API Demo\n", "\n", "\u003ctable align=\"left\"\u003e\u003ctd\u003e\n", - " \u003ca target=\"_blank\" href=\"https://colab.sandbox.google.com/github/tensorflow/models/blob/master/research/object_detection/object_detection_tutorial.ipynb\"\u003e\n", + " \u003ca target=\"_blank\" href=\"https://colab.sandbox.google.com/github/tensorflow/models/blob/master/research/object_detection/colab_tutorials/colab_tutorials/object_detection_tutorial.ipynb\"\u003e\n", " \u003cimg src=\"https://www.tensorflow.org/images/colab_logo_32px.png\" /\u003eRun in Google Colab\n", " \u003c/a\u003e\n", "\u003c/td\u003e\u003ctd\u003e\n", - " \u003ca target=\"_blank\" href=\"https://github.com/tensorflow/models/blob/master/research/object_detection/object_detection_tutorial.ipynb\"\u003e\n", + " \u003ca target=\"_blank\" href=\"https://github.com/tensorflow/models/blob/master/research/object_detection/colab_tutorials/colab_tutorials/object_detection_tutorial.ipynb\"\u003e\n", " \u003cimg width=32px src=\"https://www.tensorflow.org/images/GitHub-Mark-32px.png\" /\u003eView source on GitHub\u003c/a\u003e\n", "\u003c/td\u003e\u003c/table\u003e" ] diff --git a/research/object_detection/core/batch_multiclass_nms_test.py b/research/object_detection/core/batch_multiclass_nms_test.py index d99116a4b..06f17103b 100644 --- a/research/object_detection/core/batch_multiclass_nms_test.py +++ b/research/object_detection/core/batch_multiclass_nms_test.py @@ -27,21 +27,20 @@ from object_detection.utils import test_case class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase, parameterized.TestCase): - @parameterized.named_parameters(('', False), ('_use_static_shapes', True)) - def test_batch_multiclass_nms_with_batch_size_1(self, use_static_shapes): - boxes = tf.constant([[[[0, 0, 1, 1], [0, 0, 4, 5]], - [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]], - [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]], - [[0, 10, 1, 11], [0, 10, 1, 11]], - [[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]], - [[0, 100, 1, 101], [0, 100, 1, 101]], - [[0, 1000, 1, 1002], [0, 999, 2, 1004]], - [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]], - tf.float32) - scores = tf.constant([[[.9, 0.01], [.75, 0.05], - [.6, 0.01], [.95, 0], - [.5, 0.01], [.3, 0.01], - [.01, .85], [.01, .5]]]) + def test_batch_multiclass_nms_with_batch_size_1(self): + boxes = np.array([[[[0, 0, 1, 1], [0, 0, 4, 5]], + [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]], + [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]], + [[0, 10, 1, 11], [0, 10, 1, 11]], + [[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]], + [[0, 100, 1, 101], [0, 100, 1, 
101]], + [[0, 1000, 1, 1002], [0, 999, 2, 1004]], + [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]], + np.float32) + scores = np.array([[[.9, 0.01], [.75, 0.05], + [.6, 0.01], [.95, 0], + [.5, 0.01], [.3, 0.01], + [.01, .85], [.01, .5]]], np.float32) score_thresh = 0.1 iou_thresh = .5 max_output_size = 4 @@ -52,56 +51,51 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase, [0, 100, 1, 101]]] exp_nms_scores = [[.95, .9, .85, .3]] exp_nms_classes = [[0, 0, 1, 0]] - - (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, - nmsed_additional_fields, - num_detections) = post_processing.batch_multiclass_non_max_suppression( - boxes, - scores, - score_thresh, - iou_thresh, - max_size_per_class=max_output_size, - max_total_size=max_output_size, - use_static_shapes=use_static_shapes) - - self.assertIsNone(nmsed_masks) - self.assertIsNone(nmsed_additional_fields) - - with self.test_session() as sess: - (nmsed_boxes, nmsed_scores, nmsed_classes, - num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes, - num_detections]) - self.assertAllClose(nmsed_boxes, exp_nms_corners) - self.assertAllClose(nmsed_scores, exp_nms_scores) - self.assertAllClose(nmsed_classes, exp_nms_classes) - self.assertEqual(num_detections, [4]) + def graph_fn(boxes, scores): + (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, + nmsed_additional_fields, num_detections + ) = post_processing.batch_multiclass_non_max_suppression( + boxes, scores, score_thresh, iou_thresh, + max_size_per_class=max_output_size, + max_total_size=max_output_size) + self.assertIsNone(nmsed_masks) + self.assertIsNone(nmsed_additional_fields) + return (nmsed_boxes, nmsed_scores, nmsed_classes, num_detections) + + (nmsed_boxes, nmsed_scores, nmsed_classes, + num_detections) = self.execute_cpu(graph_fn, [boxes, scores]) + self.assertAllClose(nmsed_boxes, exp_nms_corners) + self.assertAllClose(nmsed_scores, exp_nms_scores) + self.assertAllClose(nmsed_classes, exp_nms_classes) + self.assertEqual(num_detections, [4]) def test_batch_iou_with_negative_data(self): - boxes = tf.constant([[[0, -0.01, 0.1, 1.1], [0, 0.2, 0.2, 5.0], - [0, -0.01, 0.1, 1.], [-1, -1, -1, -1]]], tf.float32) - iou = post_processing.batch_iou(boxes, boxes) + def graph_fn(): + boxes = tf.constant([[[0, -0.01, 0.1, 1.1], [0, 0.2, 0.2, 5.0], + [0, -0.01, 0.1, 1.], [-1, -1, -1, -1]]], tf.float32) + iou = post_processing.batch_iou(boxes, boxes) + return iou + iou = self.execute_cpu(graph_fn, []) expected_iou = [[[0.99999994, 0.0917431, 0.9099099, -1.], [0.0917431, 1., 0.08154944, -1.], [0.9099099, 0.08154944, 1., -1.], [-1., -1., -1., -1.]]] - with self.test_session() as sess: - iou = sess.run(iou) - self.assertAllClose(iou, expected_iou) + self.assertAllClose(iou, expected_iou) @parameterized.parameters(False, True) def test_batch_multiclass_nms_with_batch_size_2(self, use_dynamic_map_fn): - boxes = tf.constant([[[[0, 0, 1, 1], [0, 0, 4, 5]], - [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]], - [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]], - [[0, 10, 1, 11], [0, 10, 1, 11]]], - [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]], - [[0, 100, 1, 101], [0, 100, 1, 101]], - [[0, 1000, 1, 1002], [0, 999, 2, 1004]], - [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]], - tf.float32) - scores = tf.constant([[[.9, 0.01], [.75, 0.05], - [.6, 0.01], [.95, 0]], - [[.5, 0.01], [.3, 0.01], - [.01, .85], [.01, .5]]]) + boxes = np.array([[[[0, 0, 1, 1], [0, 0, 4, 5]], + [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]], + [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]], + [[0, 10, 1, 11], [0, 10, 1, 11]]], + [[[0, 10.1, 1, 11.1], 
[0, 10.1, 1, 11.1]], + [[0, 100, 1, 101], [0, 100, 1, 101]], + [[0, 1000, 1, 1002], [0, 999, 2, 1004]], + [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]], + np.float32) + scores = np.array([[[.9, 0.01], [.75, 0.05], + [.6, 0.01], [.95, 0]], + [[.5, 0.01], [.3, 0.01], + [.01, .85], [.01, .5]]], np.float32) score_thresh = 0.1 iou_thresh = .5 max_output_size = 4 @@ -118,49 +112,48 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase, [.85, .5, .3, 0]]) exp_nms_classes = np.array([[0, 0, 0, 0], [1, 0, 0, 0]]) - - (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, - nmsed_additional_fields, num_detections - ) = post_processing.batch_multiclass_non_max_suppression( - boxes, scores, score_thresh, iou_thresh, - max_size_per_class=max_output_size, max_total_size=max_output_size, - use_dynamic_map_fn=use_dynamic_map_fn) - - self.assertIsNone(nmsed_masks) - self.assertIsNone(nmsed_additional_fields) - # Check static shapes - self.assertAllEqual(nmsed_boxes.shape.as_list(), - exp_nms_corners.shape) - self.assertAllEqual(nmsed_scores.shape.as_list(), - exp_nms_scores.shape) - self.assertAllEqual(nmsed_classes.shape.as_list(), - exp_nms_classes.shape) - self.assertEqual(num_detections.shape.as_list(), [2]) - - with self.test_session() as sess: - (nmsed_boxes, nmsed_scores, nmsed_classes, - num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes, - num_detections]) - self.assertAllClose(nmsed_boxes, exp_nms_corners) - self.assertAllClose(nmsed_scores, exp_nms_scores) - self.assertAllClose(nmsed_classes, exp_nms_classes) - self.assertAllClose(num_detections, [2, 3]) + def graph_fn(boxes, scores): + (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, + nmsed_additional_fields, num_detections + ) = post_processing.batch_multiclass_non_max_suppression( + boxes, scores, score_thresh, iou_thresh, + max_size_per_class=max_output_size, + max_total_size=max_output_size, + use_dynamic_map_fn=use_dynamic_map_fn) + self.assertIsNone(nmsed_masks) + self.assertIsNone(nmsed_additional_fields) + # Check static shapes + self.assertAllEqual(nmsed_boxes.shape.as_list(), + exp_nms_corners.shape) + self.assertAllEqual(nmsed_scores.shape.as_list(), + exp_nms_scores.shape) + self.assertAllEqual(nmsed_classes.shape.as_list(), + exp_nms_classes.shape) + self.assertEqual(num_detections.shape.as_list(), [2]) + return (nmsed_boxes, nmsed_scores, nmsed_classes, num_detections) + + (nmsed_boxes, nmsed_scores, nmsed_classes, + num_detections) = self.execute_cpu(graph_fn, [boxes, scores]) + self.assertAllClose(nmsed_boxes, exp_nms_corners) + self.assertAllClose(nmsed_scores, exp_nms_scores) + self.assertAllClose(nmsed_classes, exp_nms_classes) + self.assertAllClose(num_detections, [2, 3]) def test_batch_multiclass_nms_with_per_batch_clip_window(self): - boxes = tf.constant([[[[0, 0, 1, 1], [0, 0, 4, 5]], - [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]], - [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]], - [[0, 10, 1, 11], [0, 10, 1, 11]]], - [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]], - [[0, 100, 1, 101], [0, 100, 1, 101]], - [[0, 1000, 1, 1002], [0, 999, 2, 1004]], - [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]], - tf.float32) - scores = tf.constant([[[.9, 0.01], [.75, 0.05], - [.6, 0.01], [.95, 0]], - [[.5, 0.01], [.3, 0.01], - [.01, .85], [.01, .5]]]) - clip_window = tf.constant([0., 0., 200., 200.]) + boxes = np.array([[[[0, 0, 1, 1], [0, 0, 4, 5]], + [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]], + [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]], + [[0, 10, 1, 11], [0, 10, 1, 11]]], + [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]], + [[0, 
100, 1, 101], [0, 100, 1, 101]], + [[0, 1000, 1, 1002], [0, 999, 2, 1004]], + [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]], + np.float32) + scores = np.array([[[.9, 0.01], [.75, 0.05], + [.6, 0.01], [.95, 0]], + [[.5, 0.01], [.3, 0.01], + [.01, .85], [.01, .5]]], np.float32) + clip_window = np.array([0., 0., 200., 200.], np.float32) score_thresh = 0.1 iou_thresh = .5 max_output_size = 4 @@ -177,50 +170,48 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase, [.5, .3, 0, 0]]) exp_nms_classes = np.array([[0, 0, 0, 0], [0, 0, 0, 0]]) - - (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, - nmsed_additional_fields, num_detections - ) = post_processing.batch_multiclass_non_max_suppression( - boxes, scores, score_thresh, iou_thresh, - max_size_per_class=max_output_size, max_total_size=max_output_size, - clip_window=clip_window) - - self.assertIsNone(nmsed_masks) - self.assertIsNone(nmsed_additional_fields) - # Check static shapes - self.assertAllEqual(nmsed_boxes.shape.as_list(), - exp_nms_corners.shape) - self.assertAllEqual(nmsed_scores.shape.as_list(), - exp_nms_scores.shape) - self.assertAllEqual(nmsed_classes.shape.as_list(), - exp_nms_classes.shape) - self.assertEqual(num_detections.shape.as_list(), [2]) - - with self.test_session() as sess: - (nmsed_boxes, nmsed_scores, nmsed_classes, - num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes, - num_detections]) - self.assertAllClose(nmsed_boxes, exp_nms_corners) - self.assertAllClose(nmsed_scores, exp_nms_scores) - self.assertAllClose(nmsed_classes, exp_nms_classes) - self.assertAllClose(num_detections, [2, 2]) + def graph_fn(boxes, scores, clip_window): + (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, + nmsed_additional_fields, num_detections + ) = post_processing.batch_multiclass_non_max_suppression( + boxes, scores, score_thresh, iou_thresh, + max_size_per_class=max_output_size, max_total_size=max_output_size, + clip_window=clip_window) + self.assertIsNone(nmsed_masks) + self.assertIsNone(nmsed_additional_fields) + # Check static shapes + self.assertAllEqual(nmsed_boxes.shape.as_list(), + exp_nms_corners.shape) + self.assertAllEqual(nmsed_scores.shape.as_list(), + exp_nms_scores.shape) + self.assertAllEqual(nmsed_classes.shape.as_list(), + exp_nms_classes.shape) + self.assertEqual(num_detections.shape.as_list(), [2]) + return nmsed_boxes, nmsed_scores, nmsed_classes, num_detections + + (nmsed_boxes, nmsed_scores, nmsed_classes, + num_detections) = self.execute_cpu(graph_fn, [boxes, scores, clip_window]) + self.assertAllClose(nmsed_boxes, exp_nms_corners) + self.assertAllClose(nmsed_scores, exp_nms_scores) + self.assertAllClose(nmsed_classes, exp_nms_classes) + self.assertAllClose(num_detections, [2, 2]) def test_batch_multiclass_nms_with_per_image_clip_window(self): - boxes = tf.constant([[[[0, 0, 1, 1], [0, 0, 4, 5]], - [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]], - [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]], - [[0, 10, 1, 11], [0, 10, 1, 11]]], - [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]], - [[0, 100, 1, 101], [0, 100, 1, 101]], - [[0, 1000, 1, 1002], [0, 999, 2, 1004]], - [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]], - tf.float32) - scores = tf.constant([[[.9, 0.01], [.75, 0.05], - [.6, 0.01], [.95, 0]], - [[.5, 0.01], [.3, 0.01], - [.01, .85], [.01, .5]]]) - clip_window = tf.constant([[0., 0., 5., 5.], - [0., 0., 200., 200.]]) + boxes = np.array([[[[0, 0, 1, 1], [0, 0, 4, 5]], + [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]], + [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]], + [[0, 10, 1, 11], [0, 10, 1, 11]]], + [[[0, 
10.1, 1, 11.1], [0, 10.1, 1, 11.1]], + [[0, 100, 1, 101], [0, 100, 1, 101]], + [[0, 1000, 1, 1002], [0, 999, 2, 1004]], + [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]], + np.float32) + scores = np.array([[[.9, 0.01], [.75, 0.05], + [.6, 0.01], [.95, 0]], + [[.5, 0.01], [.3, 0.01], + [.01, .85], [.01, .5]]], np.float32) + clip_window = np.array([[0., 0., 5., 5.], + [0., 0., 200., 200.]], np.float32) score_thresh = 0.1 iou_thresh = .5 max_output_size = 4 @@ -238,56 +229,55 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase, exp_nms_classes = np.array([[0, 0, 0, 0], [0, 0, 0, 0]]) - (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, - nmsed_additional_fields, num_detections - ) = post_processing.batch_multiclass_non_max_suppression( - boxes, scores, score_thresh, iou_thresh, - max_size_per_class=max_output_size, max_total_size=max_output_size, - clip_window=clip_window) - - self.assertIsNone(nmsed_masks) - self.assertIsNone(nmsed_additional_fields) - # Check static shapes - self.assertAllEqual(nmsed_boxes.shape.as_list(), - exp_nms_corners.shape) - self.assertAllEqual(nmsed_scores.shape.as_list(), - exp_nms_scores.shape) - self.assertAllEqual(nmsed_classes.shape.as_list(), - exp_nms_classes.shape) - self.assertEqual(num_detections.shape.as_list(), [2]) - - with self.test_session() as sess: - (nmsed_boxes, nmsed_scores, nmsed_classes, - num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes, - num_detections]) - self.assertAllClose(nmsed_boxes, exp_nms_corners) - self.assertAllClose(nmsed_scores, exp_nms_scores) - self.assertAllClose(nmsed_classes, exp_nms_classes) - self.assertAllClose(num_detections, [1, 2]) + def graph_fn(boxes, scores, clip_window): + (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, + nmsed_additional_fields, num_detections + ) = post_processing.batch_multiclass_non_max_suppression( + boxes, scores, score_thresh, iou_thresh, + max_size_per_class=max_output_size, max_total_size=max_output_size, + clip_window=clip_window) + self.assertIsNone(nmsed_masks) + self.assertIsNone(nmsed_additional_fields) + # Check static shapes + self.assertAllEqual(nmsed_boxes.shape.as_list(), + exp_nms_corners.shape) + self.assertAllEqual(nmsed_scores.shape.as_list(), + exp_nms_scores.shape) + self.assertAllEqual(nmsed_classes.shape.as_list(), + exp_nms_classes.shape) + self.assertEqual(num_detections.shape.as_list(), [2]) + return nmsed_boxes, nmsed_scores, nmsed_classes, num_detections + + (nmsed_boxes, nmsed_scores, nmsed_classes, + num_detections) = self.execute_cpu(graph_fn, [boxes, scores, clip_window]) + self.assertAllClose(nmsed_boxes, exp_nms_corners) + self.assertAllClose(nmsed_scores, exp_nms_scores) + self.assertAllClose(nmsed_classes, exp_nms_classes) + self.assertAllClose(num_detections, [1, 2]) def test_batch_multiclass_nms_with_masks(self): - boxes = tf.constant([[[[0, 0, 1, 1], [0, 0, 4, 5]], - [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]], - [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]], - [[0, 10, 1, 11], [0, 10, 1, 11]]], - [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]], - [[0, 100, 1, 101], [0, 100, 1, 101]], - [[0, 1000, 1, 1002], [0, 999, 2, 1004]], - [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]], - tf.float32) - scores = tf.constant([[[.9, 0.01], [.75, 0.05], - [.6, 0.01], [.95, 0]], - [[.5, 0.01], [.3, 0.01], - [.01, .85], [.01, .5]]]) - masks = tf.constant([[[[[0, 1], [2, 3]], [[1, 2], [3, 4]]], - [[[2, 3], [4, 5]], [[3, 4], [5, 6]]], - [[[4, 5], [6, 7]], [[5, 6], [7, 8]]], - [[[6, 7], [8, 9]], [[7, 8], [9, 10]]]], - [[[[8, 9], [10, 11]], [[9, 10], 
[11, 12]]], - [[[10, 11], [12, 13]], [[11, 12], [13, 14]]], - [[[12, 13], [14, 15]], [[13, 14], [15, 16]]], - [[[14, 15], [16, 17]], [[15, 16], [17, 18]]]]], - tf.float32) + boxes = np.array([[[[0, 0, 1, 1], [0, 0, 4, 5]], + [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]], + [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]], + [[0, 10, 1, 11], [0, 10, 1, 11]]], + [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]], + [[0, 100, 1, 101], [0, 100, 1, 101]], + [[0, 1000, 1, 1002], [0, 999, 2, 1004]], + [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]], + np.float32) + scores = np.array([[[.9, 0.01], [.75, 0.05], + [.6, 0.01], [.95, 0]], + [[.5, 0.01], [.3, 0.01], + [.01, .85], [.01, .5]]], np.float32) + masks = np.array([[[[[0, 1], [2, 3]], [[1, 2], [3, 4]]], + [[[2, 3], [4, 5]], [[3, 4], [5, 6]]], + [[[4, 5], [6, 7]], [[5, 6], [7, 8]]], + [[[6, 7], [8, 9]], [[7, 8], [9, 10]]]], + [[[[8, 9], [10, 11]], [[9, 10], [11, 12]]], + [[[10, 11], [12, 13]], [[11, 12], [13, 14]]], + [[[12, 13], [14, 15]], [[13, 14], [15, 16]]], + [[[14, 15], [16, 17]], [[15, 16], [17, 18]]]]], + np.float32) score_thresh = 0.1 iou_thresh = .5 max_output_size = 4 @@ -313,61 +303,58 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase, [[10, 11], [12, 13]], [[0, 0], [0, 0]]]]) - (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, - nmsed_additional_fields, num_detections - ) = post_processing.batch_multiclass_non_max_suppression( - boxes, scores, score_thresh, iou_thresh, - max_size_per_class=max_output_size, max_total_size=max_output_size, - masks=masks) - - self.assertIsNone(nmsed_additional_fields) - # Check static shapes - self.assertAllEqual(nmsed_boxes.shape.as_list(), exp_nms_corners.shape) - self.assertAllEqual(nmsed_scores.shape.as_list(), exp_nms_scores.shape) - self.assertAllEqual(nmsed_classes.shape.as_list(), exp_nms_classes.shape) - self.assertAllEqual(nmsed_masks.shape.as_list(), exp_nms_masks.shape) - self.assertEqual(num_detections.shape.as_list(), [2]) - - with self.test_session() as sess: + def graph_fn(boxes, scores, masks): (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, - num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes, - nmsed_masks, num_detections]) + nmsed_additional_fields, num_detections + ) = post_processing.batch_multiclass_non_max_suppression( + boxes, scores, score_thresh, iou_thresh, + max_size_per_class=max_output_size, max_total_size=max_output_size, + masks=masks) + self.assertIsNone(nmsed_additional_fields) + # Check static shapes + self.assertAllEqual(nmsed_boxes.shape.as_list(), exp_nms_corners.shape) + self.assertAllEqual(nmsed_scores.shape.as_list(), exp_nms_scores.shape) + self.assertAllEqual(nmsed_classes.shape.as_list(), exp_nms_classes.shape) + self.assertAllEqual(nmsed_masks.shape.as_list(), exp_nms_masks.shape) + self.assertEqual(num_detections.shape.as_list(), [2]) + return (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, + num_detections) - self.assertAllClose(nmsed_boxes, exp_nms_corners) - self.assertAllClose(nmsed_scores, exp_nms_scores) - self.assertAllClose(nmsed_classes, exp_nms_classes) - self.assertAllClose(num_detections, [2, 3]) - self.assertAllClose(nmsed_masks, exp_nms_masks) + (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, + num_detections) = self.execute_cpu(graph_fn, [boxes, scores, masks]) + self.assertAllClose(nmsed_boxes, exp_nms_corners) + self.assertAllClose(nmsed_scores, exp_nms_scores) + self.assertAllClose(nmsed_classes, exp_nms_classes) + self.assertAllClose(num_detections, [2, 3]) + self.assertAllClose(nmsed_masks, 
exp_nms_masks) def test_batch_multiclass_nms_with_additional_fields(self): - boxes = tf.constant([[[[0, 0, 1, 1], [0, 0, 4, 5]], - [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]], - [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]], - [[0, 10, 1, 11], [0, 10, 1, 11]]], - [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]], - [[0, 100, 1, 101], [0, 100, 1, 101]], - [[0, 1000, 1, 1002], [0, 999, 2, 1004]], - [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]], - tf.float32) - scores = tf.constant([[[.9, 0.01], [.75, 0.05], - [.6, 0.01], [.95, 0]], - [[.5, 0.01], [.3, 0.01], - [.01, .85], [.01, .5]]]) - additional_fields = { - 'keypoints': tf.constant( - [[[[6, 7], [8, 9]], - [[0, 1], [2, 3]], - [[0, 0], [0, 0]], - [[0, 0], [0, 0]]], - [[[13, 14], [15, 16]], - [[8, 9], [10, 11]], - [[10, 11], [12, 13]], - [[0, 0], [0, 0]]]], - tf.float32) - } - additional_fields['size'] = tf.constant( + boxes = np.array([[[[0, 0, 1, 1], [0, 0, 4, 5]], + [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]], + [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]], + [[0, 10, 1, 11], [0, 10, 1, 11]]], + [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]], + [[0, 100, 1, 101], [0, 100, 1, 101]], + [[0, 1000, 1, 1002], [0, 999, 2, 1004]], + [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]], + np.float32) + scores = np.array([[[.9, 0.01], [.75, 0.05], + [.6, 0.01], [.95, 0]], + [[.5, 0.01], [.3, 0.01], + [.01, .85], [.01, .5]]], np.float32) + keypoints = np.array( + [[[[6, 7], [8, 9]], + [[0, 1], [2, 3]], + [[0, 0], [0, 0]], + [[0, 0], [0, 0]]], + [[[13, 14], [15, 16]], + [[8, 9], [10, 11]], + [[10, 11], [12, 13]], + [[0, 0], [0, 0]]]], + np.float32) + size = np.array( [[[[6], [8]], [[0], [2]], [[0], [0]], [[0], [0]]], - [[[13], [15]], [[8], [10]], [[10], [12]], [[0], [0]]]], tf.float32) + [[[13], [15]], [[8], [10]], [[10], [12]], [[0], [0]]]], np.float32) score_thresh = 0.1 iou_thresh = .5 max_output_size = 4 @@ -399,43 +386,43 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase, [[[10], [12]], [[13], [15]], [[8], [10]], [[0], [0]]]]) - (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, - nmsed_additional_fields, num_detections - ) = post_processing.batch_multiclass_non_max_suppression( - boxes, scores, score_thresh, iou_thresh, - max_size_per_class=max_output_size, max_total_size=max_output_size, - additional_fields=additional_fields) - - self.assertIsNone(nmsed_masks) - # Check static shapes - self.assertAllEqual(nmsed_boxes.shape.as_list(), exp_nms_corners.shape) - self.assertAllEqual(nmsed_scores.shape.as_list(), exp_nms_scores.shape) - self.assertAllEqual(nmsed_classes.shape.as_list(), exp_nms_classes.shape) - self.assertEqual(len(nmsed_additional_fields), - len(exp_nms_additional_fields)) - for key in exp_nms_additional_fields: - self.assertAllEqual(nmsed_additional_fields[key].shape.as_list(), - exp_nms_additional_fields[key].shape) - self.assertEqual(num_detections.shape.as_list(), [2]) - - with self.test_session() as sess: - (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_additional_fields, - num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes, - nmsed_additional_fields, num_detections]) - - self.assertAllClose(nmsed_boxes, exp_nms_corners) - self.assertAllClose(nmsed_scores, exp_nms_scores) - self.assertAllClose(nmsed_classes, exp_nms_classes) + def graph_fn(boxes, scores, keypoints, size): + additional_fields = {'keypoints': keypoints, 'size': size} + (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, + nmsed_additional_fields, num_detections + ) = post_processing.batch_multiclass_non_max_suppression( + boxes, scores, score_thresh, iou_thresh, 
+ max_size_per_class=max_output_size, max_total_size=max_output_size, + additional_fields=additional_fields) + self.assertIsNone(nmsed_masks) + # Check static shapes + self.assertAllEqual(nmsed_boxes.shape.as_list(), exp_nms_corners.shape) + self.assertAllEqual(nmsed_scores.shape.as_list(), exp_nms_scores.shape) + self.assertAllEqual(nmsed_classes.shape.as_list(), exp_nms_classes.shape) + self.assertEqual(len(nmsed_additional_fields), + len(exp_nms_additional_fields)) for key in exp_nms_additional_fields: - self.assertAllClose(nmsed_additional_fields[key], - exp_nms_additional_fields[key]) - self.assertAllClose(num_detections, [2, 3]) - - def test_batch_multiclass_nms_with_dynamic_batch_size(self): - boxes_placeholder = tf.placeholder(tf.float32, shape=(None, None, 2, 4)) - scores_placeholder = tf.placeholder(tf.float32, shape=(None, None, 2)) - masks_placeholder = tf.placeholder(tf.float32, shape=(None, None, 2, 2, 2)) + self.assertAllEqual(nmsed_additional_fields[key].shape.as_list(), + exp_nms_additional_fields[key].shape) + self.assertEqual(num_detections.shape.as_list(), [2]) + return (nmsed_boxes, nmsed_scores, nmsed_classes, + nmsed_additional_fields['keypoints'], + nmsed_additional_fields['size'], + num_detections) + (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_keypoints, nmsed_size, + num_detections) = self.execute_cpu(graph_fn, [boxes, scores, keypoints, + size]) + + self.assertAllClose(nmsed_boxes, exp_nms_corners) + self.assertAllClose(nmsed_scores, exp_nms_scores) + self.assertAllClose(nmsed_classes, exp_nms_classes) + self.assertAllClose(nmsed_keypoints, + exp_nms_additional_fields['keypoints']) + self.assertAllClose(nmsed_size, + exp_nms_additional_fields['size']) + self.assertAllClose(num_detections, [2, 3]) + def test_batch_multiclass_nms_with_masks_and_num_valid_boxes(self): boxes = np.array([[[[0, 0, 1, 1], [0, 0, 4, 5]], [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]], [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]], @@ -443,11 +430,12 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase, [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]], [[0, 100, 1, 101], [0, 100, 1, 101]], [[0, 1000, 1, 1002], [0, 999, 2, 1004]], - [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]]) + [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]], + np.float32) scores = np.array([[[.9, 0.01], [.75, 0.05], [.6, 0.01], [.95, 0]], [[.5, 0.01], [.3, 0.01], - [.01, .85], [.01, .5]]]) + [.01, .85], [.01, .5]]], np.float32) masks = np.array([[[[[0, 1], [2, 3]], [[1, 2], [3, 4]]], [[[2, 3], [4, 5]], [[3, 4], [5, 6]]], [[[4, 5], [6, 7]], [[5, 6], [7, 8]]], @@ -455,84 +443,9 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase, [[[[8, 9], [10, 11]], [[9, 10], [11, 12]]], [[[10, 11], [12, 13]], [[11, 12], [13, 14]]], [[[12, 13], [14, 15]], [[13, 14], [15, 16]]], - [[[14, 15], [16, 17]], [[15, 16], [17, 18]]]]]) - score_thresh = 0.1 - iou_thresh = .5 - max_output_size = 4 - - exp_nms_corners = np.array([[[0, 10, 1, 11], - [0, 0, 1, 1], - [0, 0, 0, 0], - [0, 0, 0, 0]], - [[0, 999, 2, 1004], - [0, 10.1, 1, 11.1], - [0, 100, 1, 101], - [0, 0, 0, 0]]]) - exp_nms_scores = np.array([[.95, .9, 0, 0], - [.85, .5, .3, 0]]) - exp_nms_classes = np.array([[0, 0, 0, 0], - [1, 0, 0, 0]]) - exp_nms_masks = np.array([[[[6, 7], [8, 9]], - [[0, 1], [2, 3]], - [[0, 0], [0, 0]], - [[0, 0], [0, 0]]], - [[[13, 14], [15, 16]], - [[8, 9], [10, 11]], - [[10, 11], [12, 13]], - [[0, 0], [0, 0]]]]) - - (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, - nmsed_additional_fields, num_detections - ) = 
post_processing.batch_multiclass_non_max_suppression( - boxes_placeholder, scores_placeholder, score_thresh, iou_thresh, - max_size_per_class=max_output_size, max_total_size=max_output_size, - masks=masks_placeholder) - - self.assertIsNone(nmsed_additional_fields) - # Check static shapes - self.assertAllEqual(nmsed_boxes.shape.as_list(), [None, 4, 4]) - self.assertAllEqual(nmsed_scores.shape.as_list(), [None, 4]) - self.assertAllEqual(nmsed_classes.shape.as_list(), [None, 4]) - self.assertAllEqual(nmsed_masks.shape.as_list(), [None, 4, 2, 2]) - self.assertEqual(num_detections.shape.as_list(), [None]) - - with self.test_session() as sess: - (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, - num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes, - nmsed_masks, num_detections], - feed_dict={boxes_placeholder: boxes, - scores_placeholder: scores, - masks_placeholder: masks}) - self.assertAllClose(nmsed_boxes, exp_nms_corners) - self.assertAllClose(nmsed_scores, exp_nms_scores) - self.assertAllClose(nmsed_classes, exp_nms_classes) - self.assertAllClose(num_detections, [2, 3]) - self.assertAllClose(nmsed_masks, exp_nms_masks) - - def test_batch_multiclass_nms_with_masks_and_num_valid_boxes(self): - boxes = tf.constant([[[[0, 0, 1, 1], [0, 0, 4, 5]], - [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]], - [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]], - [[0, 10, 1, 11], [0, 10, 1, 11]]], - [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]], - [[0, 100, 1, 101], [0, 100, 1, 101]], - [[0, 1000, 1, 1002], [0, 999, 2, 1004]], - [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]], - tf.float32) - scores = tf.constant([[[.9, 0.01], [.75, 0.05], - [.6, 0.01], [.95, 0]], - [[.5, 0.01], [.3, 0.01], - [.01, .85], [.01, .5]]]) - masks = tf.constant([[[[[0, 1], [2, 3]], [[1, 2], [3, 4]]], - [[[2, 3], [4, 5]], [[3, 4], [5, 6]]], - [[[4, 5], [6, 7]], [[5, 6], [7, 8]]], - [[[6, 7], [8, 9]], [[7, 8], [9, 10]]]], - [[[[8, 9], [10, 11]], [[9, 10], [11, 12]]], - [[[10, 11], [12, 13]], [[11, 12], [13, 14]]], - [[[12, 13], [14, 15]], [[13, 14], [15, 16]]], - [[[14, 15], [16, 17]], [[15, 16], [17, 18]]]]], - tf.float32) - num_valid_boxes = tf.constant([1, 1], tf.int32) + [[[14, 15], [16, 17]], [[15, 16], [17, 18]]]]], + np.float32) + num_valid_boxes = np.array([1, 1], np.int32) score_thresh = 0.1 iou_thresh = .5 max_output_size = 4 @@ -558,58 +471,56 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase, [[0, 0], [0, 0]], [[0, 0], [0, 0]]]] - (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, - nmsed_additional_fields, num_detections - ) = post_processing.batch_multiclass_non_max_suppression( - boxes, scores, score_thresh, iou_thresh, - max_size_per_class=max_output_size, max_total_size=max_output_size, - num_valid_boxes=num_valid_boxes, masks=masks) - - self.assertIsNone(nmsed_additional_fields) - - with self.test_session() as sess: + def graph_fn(boxes, scores, masks, num_valid_boxes): (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, - num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes, - nmsed_masks, num_detections]) - self.assertAllClose(nmsed_boxes, exp_nms_corners) - self.assertAllClose(nmsed_scores, exp_nms_scores) - self.assertAllClose(nmsed_classes, exp_nms_classes) - self.assertAllClose(num_detections, [1, 1]) - self.assertAllClose(nmsed_masks, exp_nms_masks) + nmsed_additional_fields, num_detections + ) = post_processing.batch_multiclass_non_max_suppression( + boxes, scores, score_thresh, iou_thresh, + max_size_per_class=max_output_size, max_total_size=max_output_size, + 
masks=masks, num_valid_boxes=num_valid_boxes) + self.assertIsNone(nmsed_additional_fields) + return (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, + num_detections) + + (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, + num_detections) = self.execute_cpu(graph_fn, [boxes, scores, masks, + num_valid_boxes]) + self.assertAllClose(nmsed_boxes, exp_nms_corners) + self.assertAllClose(nmsed_scores, exp_nms_scores) + self.assertAllClose(nmsed_classes, exp_nms_classes) + self.assertAllClose(num_detections, [1, 1]) + self.assertAllClose(nmsed_masks, exp_nms_masks) def test_batch_multiclass_nms_with_additional_fields_and_num_valid_boxes( self): - boxes = tf.constant([[[[0, 0, 1, 1], [0, 0, 4, 5]], - [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]], - [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]], - [[0, 10, 1, 11], [0, 10, 1, 11]]], - [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]], - [[0, 100, 1, 101], [0, 100, 1, 101]], - [[0, 1000, 1, 1002], [0, 999, 2, 1004]], - [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]], - tf.float32) - scores = tf.constant([[[.9, 0.01], [.75, 0.05], - [.6, 0.01], [.95, 0]], - [[.5, 0.01], [.3, 0.01], - [.01, .85], [.01, .5]]]) - additional_fields = { - 'keypoints': tf.constant( - [[[[6, 7], [8, 9]], - [[0, 1], [2, 3]], - [[0, 0], [0, 0]], - [[0, 0], [0, 0]]], - [[[13, 14], [15, 16]], - [[8, 9], [10, 11]], - [[10, 11], [12, 13]], - [[0, 0], [0, 0]]]], - tf.float32) - } - - additional_fields['size'] = tf.constant( + boxes = np.array([[[[0, 0, 1, 1], [0, 0, 4, 5]], + [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]], + [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]], + [[0, 10, 1, 11], [0, 10, 1, 11]]], + [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]], + [[0, 100, 1, 101], [0, 100, 1, 101]], + [[0, 1000, 1, 1002], [0, 999, 2, 1004]], + [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]], + np.float32) + scores = np.array([[[.9, 0.01], [.75, 0.05], + [.6, 0.01], [.95, 0]], + [[.5, 0.01], [.3, 0.01], + [.01, .85], [.01, .5]]], np.float32) + keypoints = np.array( + [[[[6, 7], [8, 9]], + [[0, 1], [2, 3]], + [[0, 0], [0, 0]], + [[0, 0], [0, 0]]], + [[[13, 14], [15, 16]], + [[8, 9], [10, 11]], + [[10, 11], [12, 13]], + [[0, 0], [0, 0]]]], + np.float32) + size = np.array( [[[[7], [9]], [[1], [3]], [[0], [0]], [[0], [0]]], - [[[14], [16]], [[9], [11]], [[11], [13]], [[0], [0]]]], tf.float32) + [[[14], [16]], [[9], [11]], [[11], [13]], [[0], [0]]]], np.float32) - num_valid_boxes = tf.constant([1, 1], tf.int32) + num_valid_boxes = np.array([1, 1], np.int32) score_thresh = 0.1 iou_thresh = .5 max_output_size = 4 @@ -641,45 +552,48 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase, [[0], [0]], [[0], [0]]], [[[14], [16]], [[0], [0]], [[0], [0]], [[0], [0]]]]) - - (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, - nmsed_additional_fields, num_detections - ) = post_processing.batch_multiclass_non_max_suppression( - boxes, scores, score_thresh, iou_thresh, - max_size_per_class=max_output_size, max_total_size=max_output_size, - num_valid_boxes=num_valid_boxes, - additional_fields=additional_fields) - - self.assertIsNone(nmsed_masks) - - with self.test_session() as sess: - (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_additional_fields, - num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes, - nmsed_additional_fields, num_detections]) - - self.assertAllClose(nmsed_boxes, exp_nms_corners) - self.assertAllClose(nmsed_scores, exp_nms_scores) - self.assertAllClose(nmsed_classes, exp_nms_classes) - for key in exp_nms_additional_fields: - self.assertAllClose(nmsed_additional_fields[key], - 
exp_nms_additional_fields[key]) - self.assertAllClose(num_detections, [1, 1]) + def graph_fn(boxes, scores, keypoints, size, num_valid_boxes): + additional_fields = {'keypoints': keypoints, 'size': size} + (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, + nmsed_additional_fields, num_detections + ) = post_processing.batch_multiclass_non_max_suppression( + boxes, scores, score_thresh, iou_thresh, + max_size_per_class=max_output_size, max_total_size=max_output_size, + num_valid_boxes=num_valid_boxes, + additional_fields=additional_fields) + self.assertIsNone(nmsed_masks) + return (nmsed_boxes, nmsed_scores, nmsed_classes, + nmsed_additional_fields['keypoints'], + nmsed_additional_fields['size'], num_detections) + + (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_keypoints, nmsed_size, + num_detections) = self.execute_cpu(graph_fn, [boxes, scores, keypoints, + size, num_valid_boxes]) + + self.assertAllClose(nmsed_boxes, exp_nms_corners) + self.assertAllClose(nmsed_scores, exp_nms_scores) + self.assertAllClose(nmsed_classes, exp_nms_classes) + self.assertAllClose(nmsed_keypoints, + exp_nms_additional_fields['keypoints']) + self.assertAllClose(nmsed_size, + exp_nms_additional_fields['size']) + self.assertAllClose(num_detections, [1, 1]) def test_combined_nms_with_batch_size_2(self): """Test use_combined_nms.""" - boxes = tf.constant([[[[0, 0, 0.1, 0.1], [0, 0, 0.1, 0.1]], - [[0, 0.01, 1, 0.11], [0, 0.6, 0.1, 0.7]], - [[0, -0.01, 0.1, 0.09], [0, -0.1, 0.1, 0.09]], - [[0, 0.11, 0.1, 0.2], [0, 0.11, 0.1, 0.2]]], - [[[0, 0, 0.2, 0.2], [0, 0, 0.2, 0.2]], - [[0, 0.02, 0.2, 0.22], [0, 0.02, 0.2, 0.22]], - [[0, -0.02, 0.2, 0.19], [0, -0.02, 0.2, 0.19]], - [[0, 0.21, 0.2, 0.3], [0, 0.21, 0.2, 0.3]]]], - tf.float32) - scores = tf.constant([[[.1, 0.9], [.75, 0.8], - [.6, 0.3], [0.95, 0.1]], - [[.1, 0.9], [.75, 0.8], - [.6, .3], [.95, .1]]]) + boxes = np.array([[[[0, 0, 0.1, 0.1], [0, 0, 0.1, 0.1]], + [[0, 0.01, 1, 0.11], [0, 0.6, 0.1, 0.7]], + [[0, -0.01, 0.1, 0.09], [0, -0.1, 0.1, 0.09]], + [[0, 0.11, 0.1, 0.2], [0, 0.11, 0.1, 0.2]]], + [[[0, 0, 0.2, 0.2], [0, 0, 0.2, 0.2]], + [[0, 0.02, 0.2, 0.22], [0, 0.02, 0.2, 0.22]], + [[0, -0.02, 0.2, 0.19], [0, -0.02, 0.2, 0.19]], + [[0, 0.21, 0.2, 0.3], [0, 0.21, 0.2, 0.3]]]], + np.float32) + scores = np.array([[[.1, 0.9], [.75, 0.8], + [.6, 0.3], [0.95, 0.1]], + [[.1, 0.9], [.75, 0.8], + [.6, .3], [.95, .1]]], np.float32) score_thresh = 0.1 iou_thresh = .5 max_output_size = 3 @@ -695,27 +609,78 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase, exp_nms_classes = np.array([[0, 1, 1], [0, 1, 0]]) - (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, - nmsed_additional_fields, num_detections - ) = post_processing.batch_multiclass_non_max_suppression( - boxes, scores, score_thresh, iou_thresh, - max_size_per_class=max_output_size, max_total_size=max_output_size, - use_static_shapes=True, - use_combined_nms=True) - - self.assertIsNone(nmsed_masks) - self.assertIsNone(nmsed_additional_fields) - - with self.test_session() as sess: - (nmsed_boxes, nmsed_scores, nmsed_classes, - num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes, - num_detections]) - self.assertAllClose(nmsed_boxes, exp_nms_corners) - self.assertAllClose(nmsed_scores, exp_nms_scores) - self.assertAllClose(nmsed_classes, exp_nms_classes) - self.assertListEqual(num_detections.tolist(), [3, 3]) - - # TODO(bhattad): Remove conditional after CMLE moves to TF 1.9 + def graph_fn(boxes, scores): + (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, + 
nmsed_additional_fields, num_detections + ) = post_processing.batch_multiclass_non_max_suppression( + boxes, scores, score_thresh, iou_thresh, + max_size_per_class=max_output_size, max_total_size=max_output_size, + use_static_shapes=True, + use_combined_nms=True) + self.assertIsNone(nmsed_masks) + self.assertIsNone(nmsed_additional_fields) + return (nmsed_boxes, nmsed_scores, nmsed_classes, num_detections) + + (nmsed_boxes, nmsed_scores, nmsed_classes, + num_detections) = self.execute_cpu(graph_fn, [boxes, scores]) + self.assertAllClose(nmsed_boxes, exp_nms_corners) + self.assertAllClose(nmsed_scores, exp_nms_scores) + self.assertAllClose(nmsed_classes, exp_nms_classes) + self.assertListEqual(num_detections.tolist(), [3, 3]) + + def test_batch_multiclass_nms_with_use_static_shapes(self): + boxes = np.array([[[[0, 0, 1, 1], [0, 0, 4, 5]], + [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]], + [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]], + [[0, 10, 1, 11], [0, 10, 1, 11]]], + [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]], + [[0, 100, 1, 101], [0, 100, 1, 101]], + [[0, 1000, 1, 1002], [0, 999, 2, 1004]], + [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]], + np.float32) + scores = np.array([[[.9, 0.01], [.75, 0.05], + [.6, 0.01], [.95, 0]], + [[.5, 0.01], [.3, 0.01], + [.01, .85], [.01, .5]]], + np.float32) + clip_window = np.array([[0., 0., 5., 5.], + [0., 0., 200., 200.]], + np.float32) + score_thresh = 0.1 + iou_thresh = .5 + max_output_size = 4 + + exp_nms_corners = np.array([[[0, 0, 1, 1], + [0, 0, 0, 0], + [0, 0, 0, 0], + [0, 0, 0, 0]], + [[0, 10.1, 1, 11.1], + [0, 100, 1, 101], + [0, 0, 0, 0], + [0, 0, 0, 0]]]) + exp_nms_scores = np.array([[.9, 0., 0., 0.], + [.5, .3, 0, 0]]) + exp_nms_classes = np.array([[0, 0, 0, 0], + [0, 0, 0, 0]]) + + def graph_fn(boxes, scores, clip_window): + (nmsed_boxes, nmsed_scores, nmsed_classes, _, _, num_detections + ) = post_processing.batch_multiclass_non_max_suppression( + boxes, scores, score_thresh, iou_thresh, + max_size_per_class=max_output_size, clip_window=clip_window, + use_static_shapes=True) + return nmsed_boxes, nmsed_scores, nmsed_classes, num_detections + + (nmsed_boxes, nmsed_scores, nmsed_classes, + num_detections) = self.execute(graph_fn, [boxes, scores, clip_window]) + for i in range(len(num_detections)): + self.assertAllClose(nmsed_boxes[i, 0:num_detections[i]], + exp_nms_corners[i, 0:num_detections[i]]) + self.assertAllClose(nmsed_scores[i, 0:num_detections[i]], + exp_nms_scores[i, 0:num_detections[i]]) + self.assertAllClose(nmsed_classes[i, 0:num_detections[i]], + exp_nms_classes[i, 0:num_detections[i]]) + self.assertAllClose(num_detections, [1, 2]) if __name__ == '__main__': tf.test.main() diff --git a/research/object_detection/core/batcher.py b/research/object_detection/core/batcher.py index 832e22420..26832e30e 100644 --- a/research/object_detection/core/batcher.py +++ b/research/object_detection/core/batcher.py @@ -24,10 +24,6 @@ from six.moves import range import tensorflow.compat.v1 as tf from object_detection.core import prefetcher -from object_detection.utils import tf_version - -if not tf_version.is_tf1(): - raise ValueError('`batcher.py` is only supported in Tensorflow 1.X') rt_shape_str = '_runtime_shapes' diff --git a/research/object_detection/core/batcher_tf1_test.py b/research/object_detection/core/batcher_tf1_test.py index 8f443a942..1688b87cd 100644 --- a/research/object_detection/core/batcher_tf1_test.py +++ b/research/object_detection/core/batcher_tf1_test.py @@ -19,14 +19,17 @@ from __future__ import absolute_import from __future__ import 
division from __future__ import print_function +import unittest import numpy as np from six.moves import range import tensorflow.compat.v1 as tf import tf_slim as slim from object_detection.core import batcher +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class BatcherTest(tf.test.TestCase): def test_batch_and_unpad_2d_tensors_of_different_sizes_in_1st_dimension(self): diff --git a/research/object_detection/core/freezable_batch_norm_test.py b/research/object_detection/core/freezable_batch_norm_tf2_test.py similarity index 98% rename from research/object_detection/core/freezable_batch_norm_test.py rename to research/object_detection/core/freezable_batch_norm_tf2_test.py index 8379a3839..4cc42ae3e 100644 --- a/research/object_detection/core/freezable_batch_norm_test.py +++ b/research/object_detection/core/freezable_batch_norm_tf2_test.py @@ -17,15 +17,17 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function - +import unittest import numpy as np from six.moves import zip import tensorflow.compat.v1 as tf from object_detection.core import freezable_batch_norm +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class FreezableBatchNormTest(tf.test.TestCase): """Tests for FreezableBatchNorm operations.""" diff --git a/research/object_detection/core/losses.py b/research/object_detection/core/losses.py index 07e7dd3ff..c4d499e7e 100644 --- a/research/object_detection/core/losses.py +++ b/research/object_detection/core/losses.py @@ -681,3 +681,95 @@ class HardExampleMiner(object): num_positives, num_negatives) +class PenaltyReducedLogisticFocalLoss(Loss): + """Penalty-reduced pixelwise logistic regression with focal loss. + + The loss is defined in Equation (1) of the Objects as Points[1] paper. + Although the loss is defined per-pixel in the output space, this class + assumes that each pixel is an anchor to be compatible with the base class. + + [1]: https://arxiv.org/abs/1904.07850 + """ + + def __init__(self, alpha=2.0, beta=4.0, sigmoid_clip_value=1e-4): + """Constructor. + + Args: + alpha: Focussing parameter of the focal loss. Increasing this will + decrease the loss contribution of the well classified examples. + beta: The local penalty reduction factor. Increasing this will decrease + the contribution of loss due to negative pixels near the keypoint. + sigmoid_clip_value: The sigmoid operation used internally will be clipped + between [sigmoid_clip_value, 1 - sigmoid_clip_value) + """ + self._alpha = alpha + self._beta = beta + self._sigmoid_clip_value = sigmoid_clip_value + super(PenaltyReducedLogisticFocalLoss, self).__init__() + + def _compute_loss(self, prediction_tensor, target_tensor, weights): + """Compute loss function. + + In all input tensors, `num_anchors` is the total number of pixels in the + the output space. + + Args: + prediction_tensor: A float tensor of shape [batch_size, num_anchors, + num_classes] representing the predicted unscaled logits for each class. + The function will compute sigmoid on this tensor internally. + target_tensor: A float tensor of shape [batch_size, num_anchors, + num_classes] representing a tensor with the 'splatted' keypoints, + possibly using a gaussian kernel. This function assumes that + the target is bounded between [0, 1]. + weights: a float tensor of shape, either [batch_size, num_anchors, + num_classes] or [batch_size, num_anchors, 1]. 
If the shape is + [batch_size, num_anchors, 1], all the classses are equally weighted. + + + Returns: + loss: a float tensor of shape [batch_size, num_anchors, num_classes] + representing the value of the loss function. + """ + + is_present_tensor = tf.math.equal(target_tensor, 1.0) + prediction_tensor = tf.clip_by_value(tf.sigmoid(prediction_tensor), + self._sigmoid_clip_value, + 1 - self._sigmoid_clip_value) + + positive_loss = (tf.math.pow((1 - prediction_tensor), self._alpha)* + tf.math.log(prediction_tensor)) + negative_loss = (tf.math.pow((1 - target_tensor), self._beta)* + tf.math.pow(prediction_tensor, self._alpha)* + tf.math.log(1 - prediction_tensor)) + + loss = -tf.where(is_present_tensor, positive_loss, negative_loss) + return loss * weights + + +class L1LocalizationLoss(Loss): + """L1 loss or absolute difference. + + When used in a per-pixel manner, each pixel should be given as an anchor. + """ + + def _compute_loss(self, prediction_tensor, target_tensor, weights): + """Compute loss function. + + Args: + prediction_tensor: A float tensor of shape [batch_size, num_anchors] + representing the (encoded) predicted locations of objects. + target_tensor: A float tensor of shape [batch_size, num_anchors] + representing the regression targets + weights: a float tensor of shape [batch_size, num_anchors] + + Returns: + loss: a float tensor of shape [batch_size, num_anchors] tensor + representing the value of the loss function. + """ + return tf.losses.absolute_difference( + target_tensor, + prediction_tensor, + weights=weights, + loss_collection=None, + reduction=tf.losses.Reduction.NONE + ) diff --git a/research/object_detection/core/prefetcher.py b/research/object_detection/core/prefetcher.py index f88fbbd39..31e93eae8 100644 --- a/research/object_detection/core/prefetcher.py +++ b/research/object_detection/core/prefetcher.py @@ -16,10 +16,6 @@ """Provides functions to prefetch tensors to feed into models.""" import tensorflow.compat.v1 as tf -from object_detection.utils import tf_version -if not tf_version.is_tf1(): - raise ValueError('`prefetcher.py` is only supported in Tensorflow 1.X') - def prefetch(tensor_dict, capacity): """Creates a prefetch queue for tensors. 
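
For readers unfamiliar with the penalty-reduced focal loss added to losses.py above: it follows Eq. (1) of the Objects as Points paper. The sketch below is an illustrative NumPy re-statement and is not part of the patch; the function name and the sample values are made up for illustration.

```
# Illustrative NumPy re-statement of the loss computed by
# PenaltyReducedLogisticFocalLoss above (alpha=2, beta=4, per-pixel, unweighted).
import numpy as np

def penalty_reduced_focal_loss(logits, targets, alpha=2.0, beta=4.0, eps=1e-4):
  # `targets` is the splatted Gaussian heatmap in [0, 1]; a pixel counts as a
  # "positive" only where the target is exactly 1.
  pred = np.clip(1.0 / (1.0 + np.exp(-logits)), eps, 1.0 - eps)
  positive = ((1.0 - pred) ** alpha) * np.log(pred)
  negative = ((1.0 - targets) ** beta) * (pred ** alpha) * np.log(1.0 - pred)
  return -np.where(targets == 1.0, positive, negative)

# A confident prediction at a peak contributes almost no loss; the same
# confidence on a background pixel is penalized heavily.
print(penalty_reduced_focal_loss(np.array([4.0, 4.0]), np.array([1.0, 0.0])))
```
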
diff --git a/research/object_detection/core/prefetcher_tf1_test.py b/research/object_detection/core/prefetcher_tf1_test.py index 3c827d800..95e9155e5 100644 --- a/research/object_detection/core/prefetcher_tf1_test.py +++ b/research/object_detection/core/prefetcher_tf1_test.py @@ -18,16 +18,16 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function +import unittest from six.moves import range import tensorflow.compat.v1 as tf - -# pylint: disable=g-bad-import-order, -from object_detection.core import prefetcher import tf_slim as slim -# pylint: disable=g-bad-import-order +from object_detection.core import prefetcher +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class PrefetcherTest(tf.test.TestCase): """Test class for prefetcher.""" diff --git a/research/object_detection/core/preprocessor_test.py b/research/object_detection/core/preprocessor_test.py index a535ce207..fdb56c90a 100644 --- a/research/object_detection/core/preprocessor_test.py +++ b/research/object_detection/core/preprocessor_test.py @@ -19,6 +19,7 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function +import unittest from absl.testing import parameterized import numpy as np import six @@ -30,11 +31,12 @@ from object_detection.core import preprocessor from object_detection.core import preprocessor_cache from object_detection.core import standard_fields as fields from object_detection.utils import test_case +from object_detection.utils import tf_version if six.PY2: import mock # pylint: disable=g-import-not-at-top else: - from unittest import mock # pylint: disable=g-import-not-at-top + mock = unittest.mock # pylint: disable=g-import-not-at-top class PreprocessorTest(test_case.TestCase, parameterized.TestCase): @@ -2819,6 +2821,7 @@ class PreprocessorTest(test_case.TestCase, parameterized.TestCase): self.assertAllEqual(images_shape, patched_images_shape) self.assertAllEqual(images, patched_images) + @unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') def testAutoAugmentImage(self): def graph_fn(): preprocessing_options = [] diff --git a/research/object_detection/core/target_assigner.py b/research/object_detection/core/target_assigner.py index 3d5453bf2..fd9020ebe 100644 --- a/research/object_detection/core/target_assigner.py +++ b/research/object_detection/core/target_assigner.py @@ -50,10 +50,12 @@ from object_detection.core import matcher as mat from object_detection.core import region_similarity_calculator as sim_calc from object_detection.core import standard_fields as fields from object_detection.matchers import argmax_matcher -from object_detection.matchers import bipartite_matcher from object_detection.utils import shape_utils from object_detection.utils import target_assigner_utils as ta_utils +from object_detection.utils import tf_version +if tf_version.is_tf1(): + from object_detection.matchers import bipartite_matcher # pylint: disable=g-import-not-at-top ResizeMethod = tf2.image.ResizeMethod @@ -398,6 +400,8 @@ def create_target_assigner(reference, stage=None, ValueError: if combination reference+stage is invalid. 
""" if reference == 'Multibox' and stage == 'proposal': + if tf_version.is_tf2(): + raise ValueError('GreedyBipartiteMatcher is not supported in TF 2.X.') similarity_calc = sim_calc.NegSqDistSimilarity() matcher = bipartite_matcher.GreedyBipartiteMatcher() box_coder_instance = mean_stddev_box_coder.MeanStddevBoxCoder() @@ -713,3 +717,943 @@ def batch_assign_confidences(target_assigner, batch_reg_weights, batch_match) +def _smallest_positive_root(a, b, c): + """Returns the smallest positive root of a quadratic equation.""" + + discriminant = tf.sqrt(b ** 2 - 4 * a * c) + + # TODO(vighneshb) We are currently using the slightly incorrect + # CenterNet implementation. The commented lines implement the fixed version + # in https://github.com/princeton-vl/CornerNet. Change the implementation + # after verifying it has no negative impact. + # root1 = (-b - discriminant) / (2 * a) + # root2 = (-b + discriminant) / (2 * a) + + # return tf.where(tf.less(root1, 0), root2, root1) + + return (-b + discriminant) / (2.0) + + +def max_distance_for_overlap(height, width, min_iou): + """Computes how far apart bbox corners can lie while maintaining the iou. + + Given a bounding box size, this function returns a lower bound on how far + apart the corners of another box can lie while still maintaining the given + IoU. The implementation is based on the `gaussian_radius` function in the + Objects as Points github repo: https://github.com/xingyizhou/CenterNet + + Args: + height: A 1-D float Tensor representing height of the ground truth boxes. + width: A 1-D float Tensor representing width of the ground truth boxes. + min_iou: A float representing the minimum IoU desired. + + Returns: + distance: A 1-D Tensor of distances, of the same length as the input + height and width tensors. + """ + + # Given that the detected box is displaced at a distance `d`, the exact + # IoU value will depend on the angle at which each corner is displaced. + # We simplify our computation by assuming that each corner is displaced by + # a distance `d` in both x and y direction. This gives us a lower IoU than + # what is actually realizable and ensures that any box with corners less + # than `d` distance apart will always have an IoU greater than or equal + # to `min_iou` + + # The following 3 cases can be worked on geometrically and come down to + # solving a quadratic inequality. In each case, to ensure `min_iou` we use + # the smallest positive root of the equation. + + # Case where detected box is offset from ground truth and no box completely + # contains the other. + + distance_detection_offset = _smallest_positive_root( + a=1, b=-(height + width), + c=width * height * ((1 - min_iou) / (1 + min_iou)) + ) + + # Case where detection is smaller than ground truth and completely contained + # in it. + distance_detection_in_gt = _smallest_positive_root( + a=4, b=-2 * (height + width), + c=(1 - min_iou) * width * height + ) + + # Case where ground truth is smaller than detection and completely contained + # in it. + distance_gt_in_detection = _smallest_positive_root( + a=4 * min_iou, b=(2 * min_iou) * (width + height), + c=(min_iou - 1) * width * height + ) + + return tf.reduce_min([distance_detection_offset, + distance_gt_in_detection, + distance_detection_in_gt], axis=0) + + +def get_batch_predictions_from_indices(batch_predictions, indices): + """Gets the values of predictions in a batch at the given indices. + + The indices are expected to come from the offset targets generation functions + in this library. 
The returned value is intended to be used inside a loss + function. + + Args: + batch_predictions: A tensor of shape [batch_size, height, width, 2] for + single class offsets and [batch_size, height, width, class, 2] for + multiple classes offsets (e.g. keypoint joint offsets) representing the + (height, width) or (y_offset, x_offset) predictions over a batch. + indices: A tensor of shape [num_instances, 3] for single class offset and + [num_instances, 4] for multiple classes offsets representing the indices + in the batch to be penalized in a loss function + + Returns: + values: A tensor of shape [num_instances, 2] holding the predicted values + at the given indices. + """ + return tf.gather_nd(batch_predictions, indices) + + +def _compute_std_dev_from_box_size(boxes_height, boxes_width, min_overlap): + """Computes the standard deviation of the Gaussian kernel from box size. + + Args: + boxes_height: A 1D tensor with shape [num_instances] representing the height + of each box. + boxes_width: A 1D tensor with shape [num_instances] representing the width + of each box. + min_overlap: The minimum IOU overlap that boxes need to have to not be + penalized. + + Returns: + A 1D tensor with shape [num_instances] representing the computed Gaussian + sigma for each of the box. + """ + # We are dividing by 3 so that points closer than the computed + # distance have a >99% CDF. + sigma = max_distance_for_overlap(boxes_height, boxes_width, min_overlap) + sigma = (2 * tf.math.maximum(tf.math.floor(sigma), 0.0) + 1) / 6.0 + return sigma + + +class CenterNetCenterHeatmapTargetAssigner(object): + """Wrapper to compute the object center heatmap.""" + + def __init__(self, stride, min_overlap=0.7): + """Initializes the target assigner. + + Args: + stride: int, the stride of the network in output pixels. + min_overlap: The minimum IOU overlap that boxes need to have to not be + penalized. + """ + + self._stride = stride + self._min_overlap = min_overlap + + def assign_center_targets_from_boxes(self, + height, + width, + gt_boxes_list, + gt_classes_list, + gt_weights_list=None): + """Computes the object center heatmap target. + + Args: + height: int, height of input to the model. This is used to + determine the height of the output. + width: int, width of the input to the model. This is used to + determine the width of the output. + gt_boxes_list: A list of float tensors with shape [num_boxes, 4] + representing the groundtruth detection bounding boxes for each sample in + the batch. The box coordinates are expected in normalized coordinates. + gt_classes_list: A list of float tensors with shape [num_boxes, + num_classes] representing the one-hot encoded class labels for each box + in the gt_boxes_list. + gt_weights_list: A list of float tensors with shape [num_boxes] + representing the weight of each groundtruth detection box. + + Returns: + heatmap: A Tensor of size [batch_size, output_height, output_width, + num_classes] representing the per class center heatmap. output_height + and output_width are computed by dividing the input height and width by + the stride specified during initialization. + """ + + out_height = tf.cast(height // self._stride, tf.float32) + out_width = tf.cast(width // self._stride, tf.float32) + # Compute the yx-grid to be used to generate the heatmap. 
Each returned + # tensor has shape of [out_height, out_width] + (y_grid, x_grid) = ta_utils.image_shape_to_grids(out_height, out_width) + + heatmaps = [] + if gt_weights_list is None: + gt_weights_list = [None] * len(gt_boxes_list) + # TODO(vighneshb) Replace the for loop with a batch version. + for boxes, class_targets, weights in zip(gt_boxes_list, gt_classes_list, + gt_weights_list): + boxes = box_list.BoxList(boxes) + # Convert the box coordinates to absolute output image dimension space. + boxes = box_list_ops.to_absolute_coordinates(boxes, + height // self._stride, + width // self._stride) + # Get the box center coordinates. Each returned tensors have the shape of + # [num_instances] + (y_center, x_center, boxes_height, + boxes_width) = boxes.get_center_coordinates_and_sizes() + + # Compute the sigma from box size. The tensor shape: [num_instances]. + sigma = _compute_std_dev_from_box_size(boxes_height, boxes_width, + self._min_overlap) + # Apply the Gaussian kernel to the center coordinates. Returned heatmap + # has shape of [out_height, out_width, num_classes] + heatmap = ta_utils.coordinates_to_heatmap( + y_grid=y_grid, + x_grid=x_grid, + y_coordinates=y_center, + x_coordinates=x_center, + sigma=sigma, + channel_onehot=class_targets, + channel_weights=weights) + heatmaps.append(heatmap) + + # Return the stacked heatmaps over the batch. + return tf.stack(heatmaps, axis=0) + + +class CenterNetBoxTargetAssigner(object): + """Wrapper to compute target tensors for the object detection task. + + This class has methods that take as input a batch of ground truth tensors + (in the form of a list) and return the targets required to train the object + detection task. + """ + + def __init__(self, stride): + """Initializes the target assigner. + + Args: + stride: int, the stride of the network in output pixels. + """ + + self._stride = stride + + def assign_size_and_offset_targets(self, + height, + width, + gt_boxes_list, + gt_weights_list=None): + """Returns the box height/width and center offset targets and their indices. + + The returned values are expected to be used with predicted tensors + of size (batch_size, height//self._stride, width//self._stride, 2). The + predicted values at the relevant indices can be retrieved with the + get_batch_predictions_from_indices function. + + Args: + height: int, height of input to the model. This is used to determine the + height of the output. + width: int, width of the input to the model. This is used to determine the + width of the output. + gt_boxes_list: A list of float tensors with shape [num_boxes, 4] + representing the groundtruth detection bounding boxes for each sample in + the batch. The coordinates are expected in normalized coordinates. + gt_weights_list: A list of tensors with shape [num_boxes] corresponding to + the weight of each groundtruth detection box. + + Returns: + batch_indices: an integer tensor of shape [num_boxes, 3] holding the + indices inside the predicted tensor which should be penalized. The + first column indicates the index along the batch dimension and the + second and third columns indicate the index along the y and x + dimensions respectively. + batch_box_height_width: a float tensor of shape [num_boxes, 2] holding + expected height and width of each box in the output space. + batch_offsets: a float tensor of shape [num_boxes, 2] holding the + expected y and x offset of each box in the output space. + batch_weights: a float tensor of shape [num_boxes] indicating the + weight of each prediction. 
+
+    """
+
+    if gt_weights_list is None:
+      gt_weights_list = [None] * len(gt_boxes_list)
+
+    batch_indices = []
+    batch_box_height_width = []
+    batch_weights = []
+    batch_offsets = []
+
+    for i, (boxes, weights) in enumerate(zip(gt_boxes_list, gt_weights_list)):
+      boxes = box_list.BoxList(boxes)
+      boxes = box_list_ops.to_absolute_coordinates(boxes,
+                                                   height // self._stride,
+                                                   width // self._stride)
+      # Get the box center coordinates. Each returned tensor has the shape of
+      # [num_boxes].
+      (y_center, x_center, boxes_height,
+       boxes_width) = boxes.get_center_coordinates_and_sizes()
+      num_boxes = tf.shape(x_center)
+
+      # Compute the offsets and indices of the box centers. Shape:
+      #   offsets: [num_boxes, 2]
+      #   indices: [num_boxes, 2]
+      (offsets, indices) = ta_utils.compute_floor_offsets_with_indices(
+          y_source=y_center, x_source=x_center)
+
+      # Assign ones if weights are not provided.
+      if weights is None:
+        weights = tf.ones(num_boxes, dtype=tf.float32)
+
+      # Shape of [num_boxes, 1] integer tensor filled with current batch index.
+      batch_index = i * tf.ones_like(indices[:, 0:1], dtype=tf.int32)
+      batch_indices.append(tf.concat([batch_index, indices], axis=1))
+      batch_box_height_width.append(
+          tf.stack([boxes_height, boxes_width], axis=1))
+      batch_weights.append(weights)
+      batch_offsets.append(offsets)
+
+    batch_indices = tf.concat(batch_indices, axis=0)
+    batch_box_height_width = tf.concat(batch_box_height_width, axis=0)
+    batch_weights = tf.concat(batch_weights, axis=0)
+    batch_offsets = tf.concat(batch_offsets, axis=0)
+    return (batch_indices, batch_box_height_width, batch_offsets, batch_weights)
+
+
+# TODO(yuhuic): Update this class to handle the instance/keypoint weights.
+# Currently those weights are used as "mask" to indicate whether an
+# instance/keypoint should be considered or not (expecting only either 0 or 1
+# value). In reality, the weights can be any value and this class should handle
+# those values properly.
+class CenterNetKeypointTargetAssigner(object):
+  """Wrapper to compute target tensors for the CenterNet keypoint estimation.
+
+  This class has methods that take as input a batch of groundtruth tensors
+  (in the form of a list) and returns the targets required to train the
+  CenterNet model for keypoint estimation. Specifically, the class methods
+  expect the groundtruth in the following formats (consistent with the
+  standard Object Detection API). Note that usually the groundtruth tensors are
+  packed with a list which represents the batch dimension:
+
+  gt_classes_list: [Required] a list of 2D tf.float32 one-hot
+    (or k-hot) tensors of shape [num_instances, num_classes] containing the
+    class targets with the 0th index assumed to map to the first non-background
+    class.
+  gt_keypoints_list: [Required] a list of 3D tf.float32 tensors of
+    shape [num_instances, num_total_keypoints, 2] containing keypoint
+    coordinates. Note that the "num_total_keypoints" should be the sum of the
+    num_keypoints over all possible keypoint types, e.g. human pose, face.
+    For example, if a dataset contains both 17 human pose keypoints and 5 face
+    keypoints, then num_total_keypoints = 17 + 5 = 22.
+    If an instance contains only a subset of keypoints (e.g. human pose
+    keypoints but not face keypoints), the face keypoints will be filled with
+    zeros.
+    Also note that keypoints are assumed to be provided in normalized
+    coordinates and missing keypoints should be encoded as NaN.
+  gt_keypoints_weights_list: [Optional] a list of 3D tf.float32 tensors of
+    shape [num_instances, num_total_keypoints] representing the weights of
+    each keypoint. If not provided, then all non-NaN keypoints will be equally
+    weighted.
+  gt_boxes_list: [Optional] a list of 2D tf.float32 tensors of shape
+    [num_instances, 4] containing coordinates of the groundtruth boxes.
+    Groundtruth boxes are provided in [y_min, x_min, y_max, x_max] format and
+    assumed to be normalized and clipped relative to the image window with
+    y_min <= y_max and x_min <= x_max.
+    Note that the boxes are only used to compute the center targets but are not
+    considered as required output of the keypoint task. If the boxes were not
+    provided, the center targets will be inferred from the keypoints
+    [not implemented yet].
+  gt_weights_list: [Optional] A list of 1D tf.float32 tensors of shape
+    [num_instances] containing weights for groundtruth boxes. Only useful when
+    gt_boxes_list is also provided.
+  """
+
+  def __init__(self,
+               stride,
+               class_id,
+               keypoint_indices,
+               keypoint_std_dev=None,
+               per_keypoint_offset=False,
+               peak_radius=0):
+    """Initializes a CenterNet keypoints target assigner.
+
+    Args:
+      stride: int, the stride of the network in output pixels.
+      class_id: int, the ID of the class (0-indexed) that contains the target
+        keypoints to consider in this task. For example, if the task is human
+        pose estimation, the class id should correspond to the "human" class.
+      keypoint_indices: A list of integers representing the indices of the
+        keypoints to be considered in this task. This is used to retrieve the
+        subset of the keypoints from gt_keypoints that should be considered in
+        this task.
+      keypoint_std_dev: A list of floats representing the standard deviation
+        of the Gaussian kernel used to generate the keypoint heatmap (in the
+        unit of output pixels). This provides the flexibility of using
+        different Gaussian kernel sizes for each keypoint type. If not
+        provided, all standard deviations will be the same as the default
+        value (10.0 in the output pixel space). If provided, the length of
+        keypoint_std_dev needs to be the same as the length of
+        keypoint_indices, indicating the standard deviation of each keypoint
+        type.
+      per_keypoint_offset: boolean, indicating whether to assign offset for
+        each keypoint channel. If set False, the output offset target will have
+        the shape [batch_size, out_height, out_width, 2]. If set True, the
+        output offset target will have the shape [batch_size, out_height,
+        out_width, 2 * num_keypoints].
+      peak_radius: int, the radius (in the unit of output pixel) around heatmap
+        peak to assign the offset targets.
+    """
+
+    self._stride = stride
+    self._class_id = class_id
+    self._keypoint_indices = keypoint_indices
+    self._per_keypoint_offset = per_keypoint_offset
+    self._peak_radius = peak_radius
+    if keypoint_std_dev is None:
+      self._keypoint_std_dev = ([_DEFAULT_KEYPOINT_OFFSET_STD_DEV] *
+                                len(keypoint_indices))
+    else:
+      assert len(keypoint_indices) == len(keypoint_std_dev)
+      self._keypoint_std_dev = keypoint_std_dev
+
+  def _preprocess_keypoints_and_weights(self, out_height, out_width, keypoints,
+                                        class_onehot, class_weights,
+                                        keypoint_weights):
+    """Preprocesses the keypoints and the corresponding keypoint weights.
+
+    This function performs several common steps to preprocess the keypoints and
+    keypoint weights features, including:
+    1) Select the subset of keypoints based on the keypoint indices, fill the
+       keypoint NaN values with zeros and convert to absolute coordinates.
+    2) Generate the weights of the keypoint using the following information:
+       a. The class of the instance.
+       b. The NaN value of the keypoint coordinates.
+       c. The provided keypoint weights.
+
+    Args:
+      out_height: An integer or an integer tensor indicating the output height
+        of the model.
+      out_width: An integer or an integer tensor indicating the output width of
+        the model.
+      keypoints: A float tensor of shape [num_instances, num_total_keypoints, 2]
+        representing the original keypoint groundtruth coordinates.
+      class_onehot: A float tensor of shape [num_instances, num_classes]
+        containing the class targets with the 0th index assumed to map to the
+        first non-background class.
+      class_weights: A float tensor of shape [num_instances] containing weights
+        for groundtruth instances.
+      keypoint_weights: A float tensor of shape
+        [num_instances, num_total_keypoints] representing the weights of each
+        keypoint.
+
+    Returns:
+      A tuple of two tensors:
+        keypoints_absolute: A float tensor of shape
+          [num_instances, num_keypoints, 2] which is the selected and updated
+          keypoint coordinates.
+        keypoint_weights: A float tensor of shape [num_instances, num_keypoints]
+          representing the updated weight of each keypoint.
+    """
+    # Select the target keypoints by their type ids and generate the mask
+    # of valid elements.
+    valid_mask, keypoints = ta_utils.get_valid_keypoint_mask_for_class(
+        keypoint_coordinates=keypoints,
+        class_id=self._class_id,
+        class_onehot=class_onehot,
+        class_weights=class_weights,
+        keypoint_indices=self._keypoint_indices)
+    # Keypoint coordinates in absolute coordinate system.
+    # The shape of the tensors: [num_instances, num_keypoints, 2].
+    keypoints_absolute = keypoint_ops.to_absolute_coordinates(
+        keypoints, out_height, out_width)
+    # Assign default weights for the keypoints.
+    if keypoint_weights is None:
+      keypoint_weights = tf.ones_like(keypoints[:, :, 0])
+    else:
+      keypoint_weights = tf.gather(
+          keypoint_weights, indices=self._keypoint_indices, axis=1)
+    keypoint_weights = keypoint_weights * valid_mask
+    return keypoints_absolute, keypoint_weights
+
+  def assign_keypoint_heatmap_targets(self,
+                                      height,
+                                      width,
+                                      gt_keypoints_list,
+                                      gt_classes_list,
+                                      gt_keypoints_weights_list=None,
+                                      gt_weights_list=None,
+                                      gt_boxes_list=None):
+    """Returns the keypoint heatmap targets for the CenterNet model.
+
+    Args:
+      height: int, height of input to the CenterNet model. This is used to
+        determine the height of the output.
+      width: int, width of the input to the CenterNet model. This is used to
+        determine the width of the output.
+      gt_keypoints_list: A list of float tensors with shape [num_instances,
+        num_total_keypoints, 2]. See class-level description for more detail.
+      gt_classes_list: A list of float tensors with shape [num_instances,
+        num_classes]. See class-level description for more detail.
+      gt_keypoints_weights_list: A list of tensors with shape [num_instances,
+        num_total_keypoints] corresponding to the weight of each keypoint.
+      gt_weights_list: A list of float tensors with shape [num_instances]. See
+        class-level description for more detail.
+      gt_boxes_list: A list of float tensors with shape [num_instances, 4]. See
+        class-level description for more detail. 
If provided, the keypoint + standard deviations will be scaled based on the box sizes. + + Returns: + heatmap: A float tensor of shape [batch_size, output_height, output_width, + num_keypoints] representing the per keypoint type center heatmap. + output_height and output_width are computed by dividing the input height + and width by the stride specified during initialization. Note that the + "num_keypoints" is defined by the length of keypoint_indices, which is + not necessarily equal to "num_total_keypoints". + num_instances_batch: A 2D int tensor of shape + [batch_size, num_keypoints] representing number of instances for each + keypoint type. + valid_mask: A float tensor with shape [batch_size, output_height, + output_width] where all values within the regions of the blackout boxes + are 0.0 and 1.0 else where. + """ + out_width = tf.cast(width // self._stride, tf.float32) + out_height = tf.cast(height // self._stride, tf.float32) + # Compute the yx-grid to be used to generate the heatmap. Each returned + # tensor has shape of [out_height, out_width] + y_grid, x_grid = ta_utils.image_shape_to_grids(out_height, out_width) + + if gt_keypoints_weights_list is None: + gt_keypoints_weights_list = [None] * len(gt_keypoints_list) + if gt_weights_list is None: + gt_weights_list = [None] * len(gt_classes_list) + if gt_boxes_list is None: + gt_boxes_list = [None] * len(gt_keypoints_list) + + heatmaps = [] + num_instances_list = [] + valid_mask_list = [] + for keypoints, classes, kp_weights, weights, boxes in zip( + gt_keypoints_list, gt_classes_list, gt_keypoints_weights_list, + gt_weights_list, gt_boxes_list): + keypoints_absolute, kp_weights = self._preprocess_keypoints_and_weights( + out_height=out_height, + out_width=out_width, + keypoints=keypoints, + class_onehot=classes, + class_weights=weights, + keypoint_weights=kp_weights) + num_instances, num_keypoints, _ = ( + shape_utils.combined_static_and_dynamic_shape(keypoints_absolute)) + + # A tensor of shape [num_instances, num_keypoints] with + # each element representing the type dimension for each corresponding + # keypoint: + # [[0, 1, ..., k-1], + # [0, 1, ..., k-1], + # : + # [0, 1, ..., k-1]] + keypoint_types = tf.tile( + input=tf.expand_dims(tf.range(num_keypoints), axis=0), + multiples=[num_instances, 1]) + + # A tensor of shape [num_instances, num_keypoints] with + # each element representing the sigma of the Gaussian kernel for each + # keypoint. + keypoint_std_dev = tf.tile( + input=tf.expand_dims(tf.constant(self._keypoint_std_dev), axis=0), + multiples=[num_instances, 1]) + + # If boxes is not None, then scale the standard deviation based on the + # size of the object bounding boxes similar to object center heatmap. + if boxes is not None: + boxes = box_list.BoxList(boxes) + # Convert the box coordinates to absolute output image dimension space. + boxes = box_list_ops.to_absolute_coordinates(boxes, + height // self._stride, + width // self._stride) + # Get the box height and width. Each returned tensors have the shape + # of [num_instances] + (_, _, boxes_height, + boxes_width) = boxes.get_center_coordinates_and_sizes() + + # Compute the sigma from box size. The tensor shape: [num_instances]. + sigma = _compute_std_dev_from_box_size(boxes_height, boxes_width, 0.7) + keypoint_std_dev = keypoint_std_dev * tf.stack( + [sigma] * num_keypoints, axis=1) + + # Generate the valid region mask to ignore regions with target class but + # no corresponding keypoints. + # Shape: [num_instances]. 
+ blackout = tf.logical_and(classes[:, self._class_id] > 0, + tf.reduce_max(kp_weights, axis=1) < 1e-3) + valid_mask = ta_utils.blackout_pixel_weights_by_box_regions( + out_height, out_width, boxes.get(), blackout) + valid_mask_list.append(valid_mask) + + # Apply the Gaussian kernel to the keypoint coordinates. Returned heatmap + # has shape of [out_height, out_width, num_keypoints]. + heatmap = ta_utils.coordinates_to_heatmap( + y_grid=y_grid, + x_grid=x_grid, + y_coordinates=tf.keras.backend.flatten(keypoints_absolute[:, :, 0]), + x_coordinates=tf.keras.backend.flatten(keypoints_absolute[:, :, 1]), + sigma=tf.keras.backend.flatten(keypoint_std_dev), + channel_onehot=tf.one_hot( + tf.keras.backend.flatten(keypoint_types), depth=num_keypoints), + channel_weights=tf.keras.backend.flatten(kp_weights)) + num_instances_list.append( + tf.cast(tf.reduce_sum(kp_weights, axis=0), dtype=tf.int32)) + heatmaps.append(heatmap) + return (tf.stack(heatmaps, axis=0), tf.stack(num_instances_list, axis=0), + tf.stack(valid_mask_list, axis=0)) + + def _get_keypoint_types(self, num_instances, num_keypoints, num_neighbors): + """Gets keypoint type index tensor. + + The function prepares the tensor of keypoint indices with shape + [num_instances, num_keypoints, num_neighbors]. Each element represents the + keypoint type index for each corresponding keypoint and tiled along the 3rd + axis: + [[0, 1, ..., num_keypoints - 1], + [0, 1, ..., num_keypoints - 1], + : + [0, 1, ..., num_keypoints - 1]] + + Args: + num_instances: int, the number of instances, used to define the 1st + dimension. + num_keypoints: int, the number of keypoint types, used to define the 2nd + dimension. + num_neighbors: int, the number of neighborhood pixels to consider for each + keypoint, used to define the 3rd dimension. + + Returns: + A integer tensor of shape [num_instances, num_keypoints, num_neighbors]. + """ + keypoint_types = tf.range(num_keypoints)[tf.newaxis, :, tf.newaxis] + tiled_keypoint_types = tf.tile(keypoint_types, + multiples=[num_instances, 1, num_neighbors]) + return tiled_keypoint_types + + def assign_keypoints_offset_targets(self, + height, + width, + gt_keypoints_list, + gt_classes_list, + gt_keypoints_weights_list=None, + gt_weights_list=None): + """Returns the offsets and indices of the keypoints for location refinement. + + The returned values are used to refine the location of each keypoints in the + heatmap. The predicted values at the relevant indices can be retrieved with + the get_batch_predictions_from_indices function. + + Args: + height: int, height of input to the CenterNet model. This is used to + determine the height of the output. + width: int, width of the input to the CenterNet model. This is used to + determine the width of the output. + gt_keypoints_list: A list of tensors with shape [num_instances, + num_total_keypoints]. See class-level description for more detail. + gt_classes_list: A list of tensors with shape [num_instances, + num_classes]. See class-level description for more detail. + gt_keypoints_weights_list: A list of tensors with shape [num_instances, + num_total_keypoints] corresponding to the weight of each keypoint. + gt_weights_list: A list of float tensors with shape [num_instances]. See + class-level description for more detail. + + Returns: + batch_indices: an integer tensor of shape [num_total_instances, 3] (or + [num_total_instances, 4] if 'per_keypoint_offset' is set True) holding + the indices inside the predicted tensor which should be penalized. 
The + first column indicates the index along the batch dimension and the + second and third columns indicate the index along the y and x + dimensions respectively. The fourth column corresponds to the channel + dimension (if 'per_keypoint_offset' is set True). + batch_offsets: a float tensor of shape [num_total_instances, 2] holding + the expected y and x offset of each box in the output space. + batch_weights: a float tensor of shape [num_total_instances] indicating + the weight of each prediction. + Note that num_total_instances = batch_size * num_instances * + num_keypoints * num_neighbors + """ + + batch_indices = [] + batch_offsets = [] + batch_weights = [] + + if gt_keypoints_weights_list is None: + gt_keypoints_weights_list = [None] * len(gt_keypoints_list) + if gt_weights_list is None: + gt_weights_list = [None] * len(gt_classes_list) + for i, (keypoints, classes, kp_weights, weights) in enumerate( + zip(gt_keypoints_list, gt_classes_list, gt_keypoints_weights_list, + gt_weights_list)): + keypoints_absolute, kp_weights = self._preprocess_keypoints_and_weights( + out_height=height // self._stride, + out_width=width // self._stride, + keypoints=keypoints, + class_onehot=classes, + class_weights=weights, + keypoint_weights=kp_weights) + num_instances, num_keypoints, _ = ( + shape_utils.combined_static_and_dynamic_shape(keypoints_absolute)) + + # [num_instances * num_keypoints] + y_source = tf.keras.backend.flatten(keypoints_absolute[:, :, 0]) + x_source = tf.keras.backend.flatten(keypoints_absolute[:, :, 1]) + + # All keypoint coordinates and their neighbors: + # [num_instance * num_keypoints, num_neighbors] + (y_source_neighbors, x_source_neighbors, + valid_sources) = ta_utils.get_surrounding_grids(height // self._stride, + width // self._stride, + y_source, x_source, + self._peak_radius) + _, num_neighbors = shape_utils.combined_static_and_dynamic_shape( + y_source_neighbors) + + # Update the valid keypoint weights. + # [num_instance * num_keypoints, num_neighbors] + valid_keypoints = tf.cast( + valid_sources, dtype=tf.float32) * tf.stack( + [tf.keras.backend.flatten(kp_weights)] * num_neighbors, axis=-1) + + # Compute the offsets and indices of the box centers. Shape: + # offsets: [num_instances * num_keypoints, num_neighbors, 2] + # indices: [num_instances * num_keypoints, num_neighbors, 2] + offsets, indices = ta_utils.compute_floor_offsets_with_indices( + y_source=y_source_neighbors, + x_source=x_source_neighbors, + y_target=y_source, + x_target=x_source) + # Reshape to: + # offsets: [num_instances * num_keypoints * num_neighbors, 2] + # indices: [num_instances * num_keypoints * num_neighbors, 2] + offsets = tf.reshape(offsets, [-1, 2]) + indices = tf.reshape(indices, [-1, 2]) + + # Prepare the batch indices to be prepended. + batch_index = tf.fill( + [num_instances * num_keypoints * num_neighbors, 1], i) + if self._per_keypoint_offset: + tiled_keypoint_types = self._get_keypoint_types( + num_instances, num_keypoints, num_neighbors) + batch_indices.append( + tf.concat([batch_index, indices, + tf.reshape(tiled_keypoint_types, [-1, 1])], axis=1)) + else: + batch_indices.append(tf.concat([batch_index, indices], axis=1)) + batch_offsets.append(offsets) + batch_weights.append(tf.keras.backend.flatten(valid_keypoints)) + + # Concatenate the tensors in the batch in the first dimension: + # shape: [batch_size * num_instances * num_keypoints * num_neighbors, 3] or + # [batch_size * num_instances * num_keypoints * num_neighbors, 4] if + # 'per_keypoint_offset' is set to True. 
+ batch_indices = tf.concat(batch_indices, axis=0) + # shape: [batch_size * num_instances * num_keypoints * num_neighbors] + batch_weights = tf.concat(batch_weights, axis=0) + # shape: [batch_size * num_instances * num_keypoints * num_neighbors, 2] + batch_offsets = tf.concat(batch_offsets, axis=0) + return (batch_indices, batch_offsets, batch_weights) + + def assign_joint_regression_targets(self, + height, + width, + gt_keypoints_list, + gt_classes_list, + gt_boxes_list=None, + gt_keypoints_weights_list=None, + gt_weights_list=None): + """Returns the joint regression from center grid to keypoints. + + The joint regression is used as the grouping cue from the estimated + keypoints to instance center. The offsets are the vectors from the floored + object center coordinates to the keypoint coordinates. + + Args: + height: int, height of input to the CenterNet model. This is used to + determine the height of the output. + width: int, width of the input to the CenterNet model. This is used to + determine the width of the output. + gt_keypoints_list: A list of float tensors with shape [num_instances, + num_total_keypoints]. See class-level description for more detail. + gt_classes_list: A list of float tensors with shape [num_instances, + num_classes]. See class-level description for more detail. + gt_boxes_list: A list of float tensors with shape [num_instances, 4]. See + class-level description for more detail. If provided, then the center + targets will be computed based on the center of the boxes. + gt_keypoints_weights_list: A list of float tensors with shape + [num_instances, num_total_keypoints] representing to the weight of each + keypoint. + gt_weights_list: A list of float tensors with shape [num_instances]. See + class-level description for more detail. + + Returns: + batch_indices: an integer tensor of shape [num_instances, 4] holding the + indices inside the predicted tensor which should be penalized. The + first column indicates the index along the batch dimension and the + second and third columns indicate the index along the y and x + dimensions respectively, the last dimension refers to the keypoint type + dimension. + batch_offsets: a float tensor of shape [num_instances, 2] holding the + expected y and x offset of each box in the output space. + batch_weights: a float tensor of shape [num_instances] indicating the + weight of each prediction. + Note that num_total_instances = batch_size * num_instances * num_keypoints + + Raises: + NotImplementedError: currently the object center coordinates need to be + computed from groundtruth bounding boxes. The functionality of + generating the object center coordinates from keypoints is not + implemented yet. 
+ """ + + batch_indices = [] + batch_offsets = [] + batch_weights = [] + batch_size = len(gt_keypoints_list) + if gt_keypoints_weights_list is None: + gt_keypoints_weights_list = [None] * batch_size + if gt_boxes_list is None: + gt_boxes_list = [None] * batch_size + if gt_weights_list is None: + gt_weights_list = [None] * len(gt_classes_list) + for i, (keypoints, classes, boxes, kp_weights, weights) in enumerate( + zip(gt_keypoints_list, gt_classes_list, + gt_boxes_list, gt_keypoints_weights_list, gt_weights_list)): + keypoints_absolute, kp_weights = self._preprocess_keypoints_and_weights( + out_height=height // self._stride, + out_width=width // self._stride, + keypoints=keypoints, + class_onehot=classes, + class_weights=weights, + keypoint_weights=kp_weights) + num_instances, num_keypoints, _ = ( + shape_utils.combined_static_and_dynamic_shape(keypoints_absolute)) + + # If boxes are provided, compute the joint center from it. + if boxes is not None: + # Compute joint center from boxes. + boxes = box_list.BoxList(boxes) + boxes = box_list_ops.to_absolute_coordinates(boxes, + height // self._stride, + width // self._stride) + y_center, x_center, _, _ = boxes.get_center_coordinates_and_sizes() + else: + # TODO(yuhuic): Add the logic to generate object centers from keypoints. + raise NotImplementedError(( + 'The functionality of generating object centers from keypoints is' + ' not implemented yet. Please provide groundtruth bounding boxes.' + )) + + # Tile the yx center coordinates to be the same shape as keypoints. + y_center_tiled = tf.tile( + tf.reshape(y_center, shape=[num_instances, 1]), + multiples=[1, num_keypoints]) + x_center_tiled = tf.tile( + tf.reshape(x_center, shape=[num_instances, 1]), + multiples=[1, num_keypoints]) + # [num_instance * num_keypoints, num_neighbors] + (y_source_neighbors, x_source_neighbors, + valid_sources) = ta_utils.get_surrounding_grids( + height // self._stride, width // self._stride, + tf.keras.backend.flatten(y_center_tiled), + tf.keras.backend.flatten(x_center_tiled), self._peak_radius) + + _, num_neighbors = shape_utils.combined_static_and_dynamic_shape( + y_source_neighbors) + valid_keypoints = tf.cast( + valid_sources, dtype=tf.float32) * tf.stack( + [tf.keras.backend.flatten(kp_weights)] * num_neighbors, axis=-1) + + # Compute the offsets and indices of the box centers. Shape: + # offsets: [num_instances * num_keypoints, 2] + # indices: [num_instances * num_keypoints, 2] + (offsets, indices) = ta_utils.compute_floor_offsets_with_indices( + y_source=y_source_neighbors, + x_source=x_source_neighbors, + y_target=tf.keras.backend.flatten(keypoints_absolute[:, :, 0]), + x_target=tf.keras.backend.flatten(keypoints_absolute[:, :, 1])) + # Reshape to: + # offsets: [num_instances * num_keypoints * num_neighbors, 2] + # indices: [num_instances * num_keypoints * num_neighbors, 2] + offsets = tf.reshape(offsets, [-1, 2]) + indices = tf.reshape(indices, [-1, 2]) + + # keypoint type tensor: [num_instances, num_keypoints, num_neighbors]. 
+ tiled_keypoint_types = self._get_keypoint_types( + num_instances, num_keypoints, num_neighbors) + + batch_index = tf.fill( + [num_instances * num_keypoints * num_neighbors, 1], i) + batch_indices.append( + tf.concat([batch_index, indices, + tf.reshape(tiled_keypoint_types, [-1, 1])], axis=1)) + batch_offsets.append(offsets) + batch_weights.append(tf.keras.backend.flatten(valid_keypoints)) + + # Concatenate the tensors in the batch in the first dimension: + # shape: [batch_size * num_instances * num_keypoints, 4] + batch_indices = tf.concat(batch_indices, axis=0) + # shape: [batch_size * num_instances * num_keypoints] + batch_weights = tf.concat(batch_weights, axis=0) + # shape: [batch_size * num_instances * num_keypoints, 2] + batch_offsets = tf.concat(batch_offsets, axis=0) + return (batch_indices, batch_offsets, batch_weights) + + +class CenterNetMaskTargetAssigner(object): + """Wrapper to compute targets for segmentation masks.""" + + def __init__(self, stride): + self._stride = stride + + def assign_segmentation_targets( + self, gt_masks_list, gt_classes_list, + mask_resize_method=ResizeMethod.BILINEAR): + """Computes the segmentation targets. + + This utility produces a semantic segmentation mask for each class, starting + with whole image instance segmentation masks. Effectively, each per-class + segmentation target is the union of all masks from that class. + + Args: + gt_masks_list: A list of float tensors with shape [num_boxes, + input_height, input_width] with values in {0, 1} representing instance + masks for each object. + gt_classes_list: A list of float tensors with shape [num_boxes, + num_classes] representing the one-hot encoded class labels for each box + in the gt_boxes_list. + mask_resize_method: A `tf.compat.v2.image.ResizeMethod`. The method to use + when resizing masks from input resolution to output resolution. + + Returns: + segmentation_targets: An int32 tensor of size [batch_size, output_height, + output_width, num_classes] representing the class of each location in + the output space. + """ + # TODO(ronnyvotel): Handle groundtruth weights. + _, num_classes = shape_utils.combined_static_and_dynamic_shape( + gt_classes_list[0]) + + _, input_height, input_width = ( + shape_utils.combined_static_and_dynamic_shape(gt_masks_list[0])) + output_height = input_height // self._stride + output_width = input_width // self._stride + + segmentation_targets_list = [] + for gt_masks, gt_classes in zip(gt_masks_list, gt_classes_list): + # Resize segmentation masks to conform to output dimensions. Use TF2 + # image resize because TF1's version is buggy: + # https://yaqs.corp.google.com/eng/q/4970450458378240 + gt_masks = tf2.image.resize( + gt_masks[:, :, :, tf.newaxis], + size=(output_height, output_width), + method=mask_resize_method) + gt_classes_reshaped = tf.reshape(gt_classes, [-1, 1, 1, num_classes]) + # Shape: [h, w, num_classes]. 
+ segmentations_for_image = tf.reduce_max( + gt_masks * gt_classes_reshaped, axis=0) + segmentation_targets_list.append(segmentations_for_image) + + segmentation_target = tf.stack(segmentation_targets_list, axis=0) + return segmentation_target diff --git a/research/object_detection/core/target_assigner_test.py b/research/object_detection/core/target_assigner_test.py index fb0a63bd1..5a0ca43e5 100644 --- a/research/object_detection/core/target_assigner_test.py +++ b/research/object_detection/core/target_assigner_test.py @@ -24,9 +24,9 @@ from object_detection.core import region_similarity_calculator from object_detection.core import standard_fields as fields from object_detection.core import target_assigner as targetassigner from object_detection.matchers import argmax_matcher -from object_detection.matchers import bipartite_matcher from object_detection.utils import np_box_ops from object_detection.utils import test_case +from object_detection.utils import tf_version class TargetAssignerTest(test_case.TestCase): @@ -439,7 +439,7 @@ class TargetAssignerTest(test_case.TestCase): def test_raises_error_on_incompatible_groundtruth_boxes_and_labels(self): similarity_calc = region_similarity_calculator.NegSqDistSimilarity() - matcher = bipartite_matcher.GreedyBipartiteMatcher() + matcher = argmax_matcher.ArgMaxMatcher(0.5) box_coder = mean_stddev_box_coder.MeanStddevBoxCoder() unmatched_class_label = tf.constant([1, 0, 0, 0, 0, 0, 0], tf.float32) target_assigner = targetassigner.TargetAssigner( @@ -469,7 +469,7 @@ class TargetAssignerTest(test_case.TestCase): def test_raises_error_on_invalid_groundtruth_labels(self): similarity_calc = region_similarity_calculator.NegSqDistSimilarity() - matcher = bipartite_matcher.GreedyBipartiteMatcher() + matcher = argmax_matcher.ArgMaxMatcher(0.5) box_coder = mean_stddev_box_coder.MeanStddevBoxCoder(stddev=1.0) unmatched_class_label = tf.constant([[0, 0], [0, 0], [0, 0]], tf.float32) target_assigner = targetassigner.TargetAssigner( @@ -1191,7 +1191,7 @@ class BatchTargetAssignConfidencesTest(test_case.TestCase): ]) -class CreateTargetAssignerTest(tf.test.TestCase): +class CreateTargetAssignerTest(test_case.TestCase): def test_create_target_assigner(self): """Tests that named constructor gives working target assigners. @@ -1202,9 +1202,10 @@ class CreateTargetAssignerTest(tf.test.TestCase): groundtruth = box_list.BoxList(tf.constant(corners)) priors = box_list.BoxList(tf.constant(corners)) - multibox_ta = (targetassigner - .create_target_assigner('Multibox', stage='proposal')) - multibox_ta.assign(priors, groundtruth) + if tf_version.is_tf1(): + multibox_ta = (targetassigner + .create_target_assigner('Multibox', stage='proposal')) + multibox_ta.assign(priors, groundtruth) # No tests on output, as that may vary arbitrarily as new target assigners # are added. As long as it is constructed correctly and runs without errors, # tests on the individual assigners cover correctness of the assignments. 
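The `CenterNetMaskTargetAssigner.assign_segmentation_targets` logic added above (resize each instance mask, multiply it by its one-hot class vector, then take the max over instances) reduces to a broadcast-and-max. A minimal NumPy sketch of the same idea; the helper name and toy inputs below are illustrative only and not part of the patch:

```
import numpy as np

def per_class_segmentation_targets(instance_masks, one_hot_classes):
  """Union of instance masks per class via broadcast multiply and max."""
  masks = instance_masks[:, :, :, np.newaxis]              # [n, h, w, 1]
  classes = one_hot_classes[:, np.newaxis, np.newaxis, :]  # [n, 1, 1, c]
  return np.max(masks * classes, axis=0)                   # [h, w, c]

# Two instances of class 0 covering different pixels of a 2x2 grid.
masks = np.array([[[1., 0.], [0., 0.]],
                  [[0., 1.], [0., 0.]]])
classes = np.array([[1., 0.],
                    [1., 0.]])
print(per_class_segmentation_targets(masks, classes)[:, :, 0])
# [[1. 1.]
#  [0. 0.]]
```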
@@ -1229,6 +1230,681 @@ class CreateTargetAssignerTest(tf.test.TestCase): stage='invalid_stage') +def _array_argmax(array): + return np.unravel_index(np.argmax(array), array.shape) + + +class CenterNetCenterHeatmapTargetAssignerTest(test_case.TestCase): + + def setUp(self): + super(CenterNetCenterHeatmapTargetAssignerTest, self).setUp() + + self._box_center = [0.0, 0.0, 1.0, 1.0] + self._box_center_small = [0.25, 0.25, 0.75, 0.75] + self._box_lower_left = [0.5, 0.0, 1.0, 0.5] + self._box_center_offset = [0.1, 0.05, 1.0, 1.0] + self._box_odd_coordinates = [0.1625, 0.2125, 0.5625, 0.9625] + + def test_center_location(self): + """Test that the centers are at the correct location.""" + def graph_fn(): + box_batch = [tf.constant([self._box_center, self._box_lower_left])] + classes = [ + tf.one_hot([0, 1], depth=4), + ] + assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner(4) + targets = assigner.assign_center_targets_from_boxes(80, 80, box_batch, + classes) + return targets + targets = self.execute(graph_fn, []) + self.assertEqual((10, 10), _array_argmax(targets[0, :, :, 0])) + self.assertAlmostEqual(1.0, targets[0, 10, 10, 0]) + self.assertEqual((15, 5), _array_argmax(targets[0, :, :, 1])) + self.assertAlmostEqual(1.0, targets[0, 15, 5, 1]) + + def test_center_batch_shape(self): + """Test that the shape of the target for a batch is correct.""" + def graph_fn(): + box_batch = [ + tf.constant([self._box_center, self._box_lower_left]), + tf.constant([self._box_center]), + tf.constant([self._box_center_small]), + ] + classes = [ + tf.one_hot([0, 1], depth=4), + tf.one_hot([2], depth=4), + tf.one_hot([3], depth=4), + ] + assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner(4) + targets = assigner.assign_center_targets_from_boxes(80, 80, box_batch, + classes) + return targets + targets = self.execute(graph_fn, []) + self.assertEqual((3, 20, 20, 4), targets.shape) + + def test_center_overlap_maximum(self): + """Test that when boxes overlap we, are computing the maximum.""" + def graph_fn(): + box_batch = [ + tf.constant([ + self._box_center, self._box_center_offset, self._box_center, + self._box_center_offset + ]) + ] + classes = [ + tf.one_hot([0, 0, 1, 2], depth=4), + ] + + assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner(4) + targets = assigner.assign_center_targets_from_boxes(80, 80, box_batch, + classes) + return targets + targets = self.execute(graph_fn, []) + class0_targets = targets[0, :, :, 0] + class1_targets = targets[0, :, :, 1] + class2_targets = targets[0, :, :, 2] + np.testing.assert_allclose(class0_targets, + np.maximum(class1_targets, class2_targets)) + + def test_size_blur(self): + """Test that the heatmap of a larger box is more blurred.""" + def graph_fn(): + box_batch = [tf.constant([self._box_center, self._box_center_small])] + + classes = [ + tf.one_hot([0, 1], depth=4), + ] + assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner(4) + targets = assigner.assign_center_targets_from_boxes(80, 80, box_batch, + classes) + return targets + targets = self.execute(graph_fn, []) + self.assertGreater( + np.count_nonzero(targets[:, :, :, 0]), + np.count_nonzero(targets[:, :, :, 1])) + + def test_weights(self): + """Test that the weights correctly ignore ground truth.""" + def graph1_fn(): + box_batch = [ + tf.constant([self._box_center, self._box_lower_left]), + tf.constant([self._box_center]), + tf.constant([self._box_center_small]), + ] + classes = [ + tf.one_hot([0, 1], depth=4), + tf.one_hot([2], depth=4), + tf.one_hot([3], depth=4), + ] + 
assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner(4) + targets = assigner.assign_center_targets_from_boxes(80, 80, box_batch, + classes) + return targets + + targets = self.execute(graph1_fn, []) + self.assertAlmostEqual(1.0, targets[0, :, :, 0].max()) + self.assertAlmostEqual(1.0, targets[0, :, :, 1].max()) + self.assertAlmostEqual(1.0, targets[1, :, :, 2].max()) + self.assertAlmostEqual(1.0, targets[2, :, :, 3].max()) + self.assertAlmostEqual(0.0, targets[0, :, :, [2, 3]].max()) + self.assertAlmostEqual(0.0, targets[1, :, :, [0, 1, 3]].max()) + self.assertAlmostEqual(0.0, targets[2, :, :, :3].max()) + + def graph2_fn(): + weights = [ + tf.constant([0., 1.]), + tf.constant([1.]), + tf.constant([1.]), + ] + box_batch = [ + tf.constant([self._box_center, self._box_lower_left]), + tf.constant([self._box_center]), + tf.constant([self._box_center_small]), + ] + classes = [ + tf.one_hot([0, 1], depth=4), + tf.one_hot([2], depth=4), + tf.one_hot([3], depth=4), + ] + assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner(4) + targets = assigner.assign_center_targets_from_boxes(80, 80, box_batch, + classes, + weights) + return targets + targets = self.execute(graph2_fn, []) + self.assertAlmostEqual(1.0, targets[0, :, :, 1].max()) + self.assertAlmostEqual(1.0, targets[1, :, :, 2].max()) + self.assertAlmostEqual(1.0, targets[2, :, :, 3].max()) + self.assertAlmostEqual(0.0, targets[0, :, :, [0, 2, 3]].max()) + self.assertAlmostEqual(0.0, targets[1, :, :, [0, 1, 3]].max()) + self.assertAlmostEqual(0.0, targets[2, :, :, :3].max()) + + def test_low_overlap(self): + def graph1_fn(): + box_batch = [tf.constant([self._box_center])] + classes = [ + tf.one_hot([0], depth=2), + ] + assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner( + 4, min_overlap=0.1) + targets_low_overlap = assigner.assign_center_targets_from_boxes( + 80, 80, box_batch, classes) + return targets_low_overlap + targets_low_overlap = self.execute(graph1_fn, []) + self.assertLess(1, np.count_nonzero(targets_low_overlap)) + + def graph2_fn(): + box_batch = [tf.constant([self._box_center])] + classes = [ + tf.one_hot([0], depth=2), + ] + assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner( + 4, min_overlap=0.6) + targets_medium_overlap = assigner.assign_center_targets_from_boxes( + 80, 80, box_batch, classes) + return targets_medium_overlap + targets_medium_overlap = self.execute(graph2_fn, []) + self.assertLess(1, np.count_nonzero(targets_medium_overlap)) + + def graph3_fn(): + box_batch = [tf.constant([self._box_center])] + classes = [ + tf.one_hot([0], depth=2), + ] + assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner( + 4, min_overlap=0.99) + targets_high_overlap = assigner.assign_center_targets_from_boxes( + 80, 80, box_batch, classes) + return targets_high_overlap + + targets_high_overlap = self.execute(graph3_fn, []) + self.assertTrue(np.all(targets_low_overlap >= targets_medium_overlap)) + self.assertTrue(np.all(targets_medium_overlap >= targets_high_overlap)) + + def test_empty_box_list(self): + """Test that an empty box list gives an all 0 heatmap.""" + def graph_fn(): + box_batch = [ + tf.zeros((0, 4), dtype=tf.float32), + ] + + classes = [ + tf.zeros((0, 5), dtype=tf.float32), + ] + + assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner( + 4, min_overlap=0.1) + targets = assigner.assign_center_targets_from_boxes( + 80, 80, box_batch, classes) + return targets + targets = self.execute(graph_fn, []) + np.testing.assert_allclose(targets, 0.) 
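The heatmap expectations exercised above (a peak of exactly 1.0 at the strided box center, one channel per class, an element-wise maximum where boxes overlap, and a wider spread for larger boxes) all follow from rendering one Gaussian per box. A minimal NumPy sketch of that behavior; `render_center_heatmap` and `sigma_fn` are made-up names, and `sigma_fn` stands in for the `min_overlap`-derived radius the real assigner computes via `max_distance_for_overlap`:

```
import numpy as np

def render_center_heatmap(boxes, class_ids, num_classes, height, width,
                          stride, sigma_fn):
  """Toy CenterNet-style center heatmap: one Gaussian peak per box."""
  out_h, out_w = height // stride, width // stride
  heatmap = np.zeros((out_h, out_w, num_classes), dtype=np.float32)
  ys, xs = np.mgrid[0:out_h, 0:out_w]
  for (ymin, xmin, ymax, xmax), cls in zip(boxes, class_ids):
    # Boxes are normalized; convert the center and size to output cells.
    yc, xc = (ymin + ymax) / 2 * out_h, (xmin + xmax) / 2 * out_w
    sigma = sigma_fn((ymax - ymin) * out_h, (xmax - xmin) * out_w)
    gaussian = np.exp(-((ys - yc) ** 2 + (xs - xc) ** 2) / (2 * sigma ** 2))
    # Overlapping boxes of the same class keep the element-wise maximum.
    heatmap[:, :, cls] = np.maximum(heatmap[:, :, cls], gaussian)
  return heatmap

hm = render_center_heatmap([[0., 0., 1., 1.]], [0], num_classes=2,
                           height=80, width=80, stride=4,
                           sigma_fn=lambda h, w: max(h, w) / 6.0)
print(np.unravel_index(hm[:, :, 0].argmax(), hm[:, :, 0].shape))  # (10, 10)
```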
+ + +class CenterNetBoxTargetAssignerTest(test_case.TestCase): + + def setUp(self): + super(CenterNetBoxTargetAssignerTest, self).setUp() + self._box_center = [0.0, 0.0, 1.0, 1.0] + self._box_center_small = [0.25, 0.25, 0.75, 0.75] + self._box_lower_left = [0.5, 0.0, 1.0, 0.5] + self._box_center_offset = [0.1, 0.05, 1.0, 1.0] + self._box_odd_coordinates = [0.1625, 0.2125, 0.5625, 0.9625] + + def test_max_distance_for_overlap(self): + """Test that the distance ensures the IoU with random boxes.""" + + # TODO(vighneshb) remove this after the `_smallest_positive_root` + # function if fixed. + self.skipTest(('Skipping test because we are using an incorrect version of' + 'the `max_distance_for_overlap` function to reproduce' + ' results.')) + + rng = np.random.RandomState(0) + n_samples = 100 + + width = rng.uniform(1, 100, size=n_samples) + height = rng.uniform(1, 100, size=n_samples) + min_iou = rng.uniform(0.1, 1.0, size=n_samples) + + def graph_fn(): + max_dist = targetassigner.max_distance_for_overlap(height, width, min_iou) + return max_dist + max_dist = self.execute(graph_fn, []) + xmin1 = np.zeros(n_samples) + ymin1 = np.zeros(n_samples) + xmax1 = np.zeros(n_samples) + width + ymax1 = np.zeros(n_samples) + height + + xmin2 = max_dist * np.cos(rng.uniform(0, 2 * np.pi)) + ymin2 = max_dist * np.sin(rng.uniform(0, 2 * np.pi)) + xmax2 = width + max_dist * np.cos(rng.uniform(0, 2 * np.pi)) + ymax2 = height + max_dist * np.sin(rng.uniform(0, 2 * np.pi)) + + boxes1 = np.vstack([ymin1, xmin1, ymax1, xmax1]).T + boxes2 = np.vstack([ymin2, xmin2, ymax2, xmax2]).T + + iou = np.diag(np_box_ops.iou(boxes1, boxes2)) + + self.assertTrue(np.all(iou >= min_iou)) + + def test_max_distance_for_overlap_centernet(self): + """Test the version of the function used in the CenterNet paper.""" + + def graph_fn(): + distance = targetassigner.max_distance_for_overlap(10, 5, 0.5) + return distance + distance = self.execute(graph_fn, []) + self.assertAlmostEqual(2.807764064, distance) + + def test_assign_size_and_offset_targets(self): + """Test the assign_size_and_offset_targets function.""" + def graph_fn(): + box_batch = [ + tf.constant([self._box_center, self._box_lower_left]), + tf.constant([self._box_center_offset]), + tf.constant([self._box_center_small, self._box_odd_coordinates]), + ] + + assigner = targetassigner.CenterNetBoxTargetAssigner(4) + indices, hw, yx_offset, weights = assigner.assign_size_and_offset_targets( + 80, 80, box_batch) + return indices, hw, yx_offset, weights + indices, hw, yx_offset, weights = self.execute(graph_fn, []) + self.assertEqual(indices.shape, (5, 3)) + self.assertEqual(hw.shape, (5, 2)) + self.assertEqual(yx_offset.shape, (5, 2)) + self.assertEqual(weights.shape, (5,)) + np.testing.assert_array_equal( + indices, + [[0, 10, 10], [0, 15, 5], [1, 11, 10], [2, 10, 10], [2, 7, 11]]) + np.testing.assert_array_equal( + hw, [[20, 20], [10, 10], [18, 19], [10, 10], [8, 15]]) + np.testing.assert_array_equal( + yx_offset, [[0, 0], [0, 0], [0, 0.5], [0, 0], [0.25, 0.75]]) + np.testing.assert_array_equal(weights, 1) + + def test_assign_size_and_offset_targets_weights(self): + """Test the assign_size_and_offset_targets function with box weights.""" + def graph_fn(): + box_batch = [ + tf.constant([self._box_center, self._box_lower_left]), + tf.constant([self._box_lower_left, self._box_center_small]), + tf.constant([self._box_center_small, self._box_odd_coordinates]), + ] + + cn_assigner = targetassigner.CenterNetBoxTargetAssigner(4) + weights_batch = [ + tf.constant([0.0, 1.0]), + 
tf.constant([1.0, 1.0]), + tf.constant([0.0, 0.0]) + ] + indices, hw, yx_offset, weights = cn_assigner.assign_size_and_offset_targets( + 80, 80, box_batch, weights_batch) + return indices, hw, yx_offset, weights + indices, hw, yx_offset, weights = self.execute(graph_fn, []) + self.assertEqual(indices.shape, (6, 3)) + self.assertEqual(hw.shape, (6, 2)) + self.assertEqual(yx_offset.shape, (6, 2)) + self.assertEqual(weights.shape, (6,)) + np.testing.assert_array_equal(indices, + [[0, 10, 10], [0, 15, 5], [1, 15, 5], + [1, 10, 10], [2, 10, 10], [2, 7, 11]]) + np.testing.assert_array_equal( + hw, [[20, 20], [10, 10], [10, 10], [10, 10], [10, 10], [8, 15]]) + np.testing.assert_array_equal( + yx_offset, [[0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0.25, 0.75]]) + np.testing.assert_array_equal(weights, [0, 1, 1, 1, 0, 0]) + + def test_get_batch_predictions_from_indices(self): + """Test the get_batch_predictions_from_indices function. + + This test verifies that the indices returned by + assign_size_and_offset_targets function work as expected with a predicted + tensor. + + """ + def graph_fn(): + box_batch = [ + tf.constant([self._box_center, self._box_lower_left]), + tf.constant([self._box_center_small, self._box_odd_coordinates]), + ] + + pred_array = np.ones((2, 40, 20, 2), dtype=np.int32) * -1000 + pred_array[0, 20, 10] = [1, 2] + pred_array[0, 30, 5] = [3, 4] + pred_array[1, 20, 10] = [5, 6] + pred_array[1, 14, 11] = [7, 8] + + pred_tensor = tf.constant(pred_array) + + cn_assigner = targetassigner.CenterNetBoxTargetAssigner(4) + indices, _, _, _ = cn_assigner.assign_size_and_offset_targets( + 160, 80, box_batch) + + preds = targetassigner.get_batch_predictions_from_indices( + pred_tensor, indices) + return preds + preds = self.execute(graph_fn, []) + np.testing.assert_array_equal(preds, [[1, 2], [3, 4], [5, 6], [7, 8]]) + + +class CenterNetKeypointTargetAssignerTest(test_case.TestCase): + + def test_keypoint_heatmap_targets(self): + def graph_fn(): + gt_classes_list = [ + tf.one_hot([0, 1, 0, 1], depth=4), + ] + coordinates = tf.expand_dims( + tf.constant( + np.array([[0.1, 0.2, 0.3, 0.4, 0.5], + [float('nan'), 0.7, float('nan'), 0.9, 1.0], + [0.4, 0.1, 0.4, 0.2, 0.1], + [float('nan'), 0.1, 0.5, 0.7, 0.6]]), + dtype=tf.float32), + axis=2) + gt_keypoints_list = [tf.concat([coordinates, coordinates], axis=2)] + gt_boxes_list = [ + tf.constant( + np.array([[0.0, 0.0, 0.3, 0.3], + [0.0, 0.0, 0.5, 0.5], + [0.0, 0.0, 0.5, 0.5], + [0.0, 0.0, 1.0, 1.0]]), + dtype=tf.float32) + ] + + cn_assigner = targetassigner.CenterNetKeypointTargetAssigner( + stride=4, + class_id=1, + keypoint_indices=[0, 2]) + (targets, num_instances_batch, + valid_mask) = cn_assigner.assign_keypoint_heatmap_targets( + 120, + 80, + gt_keypoints_list, + gt_classes_list, + gt_boxes_list=gt_boxes_list) + return targets, num_instances_batch, valid_mask + + targets, num_instances_batch, valid_mask = self.execute(graph_fn, []) + # keypoint (0.5, 0.5) is selected. The peak is expected to appear at the + # center of the image. + self.assertEqual((15, 10), _array_argmax(targets[0, :, :, 1])) + self.assertAlmostEqual(1.0, targets[0, 15, 10, 1]) + # No peak for the first class since NaN is selected. + self.assertAlmostEqual(0.0, targets[0, 15, 10, 0]) + # Verify the output heatmap shape. + self.assertAllEqual([1, 30, 20, 2], targets.shape) + # Verify the number of instances is correct. 
+ np.testing.assert_array_almost_equal([[0, 1]], + num_instances_batch) + # When calling the function, we specify the class id to be 1 (1th and 3rd) + # instance and the keypoint indices to be [0, 2], meaning that the 1st + # instance is the target class with no valid keypoints in it. As a result, + # the region of the 1st instance boxing box should be blacked out + # (0.0, 0.0, 0.5, 0.5), transfering to (0, 0, 15, 10) in absolute output + # space. + self.assertAlmostEqual(np.sum(valid_mask[:, 0:16, 0:11]), 0.0) + # All other values are 1.0 so the sum is: 30 * 20 - 16 * 11 = 424. + self.assertAlmostEqual(np.sum(valid_mask), 424.0) + + def test_assign_keypoints_offset_targets(self): + def graph_fn(): + gt_classes_list = [ + tf.one_hot([0, 1, 0, 1], depth=4), + ] + coordinates = tf.expand_dims( + tf.constant( + np.array([[0.1, 0.2, 0.3, 0.4, 0.5], + [float('nan'), 0.7, float('nan'), 0.9, 0.4], + [0.4, 0.1, 0.4, 0.2, 0.0], + [float('nan'), 0.0, 0.12, 0.7, 0.4]]), + dtype=tf.float32), + axis=2) + gt_keypoints_list = [tf.concat([coordinates, coordinates], axis=2)] + + cn_assigner = targetassigner.CenterNetKeypointTargetAssigner( + stride=4, + class_id=1, + keypoint_indices=[0, 2]) + (indices, offsets, weights) = cn_assigner.assign_keypoints_offset_targets( + height=120, + width=80, + gt_keypoints_list=gt_keypoints_list, + gt_classes_list=gt_classes_list) + return indices, weights, offsets + indices, weights, offsets = self.execute(graph_fn, []) + # Only the last element has positive weight. + np.testing.assert_array_almost_equal( + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0], weights) + # Validate the last element's indices and offsets. + np.testing.assert_array_equal([0, 3, 2], indices[7, :]) + np.testing.assert_array_almost_equal([0.6, 0.4], offsets[7, :]) + + def test_assign_keypoints_offset_targets_radius(self): + def graph_fn(): + gt_classes_list = [ + tf.one_hot([0, 1, 0, 1], depth=4), + ] + coordinates = tf.expand_dims( + tf.constant( + np.array([[0.1, 0.2, 0.3, 0.4, 0.5], + [float('nan'), 0.7, float('nan'), 0.9, 0.4], + [0.4, 0.1, 0.4, 0.2, 0.0], + [float('nan'), 0.0, 0.12, 0.7, 0.4]]), + dtype=tf.float32), + axis=2) + gt_keypoints_list = [tf.concat([coordinates, coordinates], axis=2)] + + cn_assigner = targetassigner.CenterNetKeypointTargetAssigner( + stride=4, + class_id=1, + keypoint_indices=[0, 2], + peak_radius=1, + per_keypoint_offset=True) + (indices, offsets, weights) = cn_assigner.assign_keypoints_offset_targets( + height=120, + width=80, + gt_keypoints_list=gt_keypoints_list, + gt_classes_list=gt_classes_list) + return indices, weights, offsets + indices, weights, offsets = self.execute(graph_fn, []) + + # There are total 8 * 5 (neighbors) = 40 targets. + self.assertAllEqual(indices.shape, [40, 4]) + self.assertAllEqual(offsets.shape, [40, 2]) + self.assertAllEqual(weights.shape, [40]) + # Only the last 5 (radius 1 generates 5 valid points) element has positive + # weight. + np.testing.assert_array_almost_equal([ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0 + ], weights) + # Validate the last element's (with neighbors) indices and offsets. 
+ np.testing.assert_array_equal([0, 2, 2, 1], indices[35, :]) + np.testing.assert_array_equal([0, 3, 1, 1], indices[36, :]) + np.testing.assert_array_equal([0, 3, 2, 1], indices[37, :]) + np.testing.assert_array_equal([0, 3, 3, 1], indices[38, :]) + np.testing.assert_array_equal([0, 4, 2, 1], indices[39, :]) + np.testing.assert_array_almost_equal([1.6, 0.4], offsets[35, :]) + np.testing.assert_array_almost_equal([0.6, 1.4], offsets[36, :]) + np.testing.assert_array_almost_equal([0.6, 0.4], offsets[37, :]) + np.testing.assert_array_almost_equal([0.6, -0.6], offsets[38, :]) + np.testing.assert_array_almost_equal([-0.4, 0.4], offsets[39, :]) + + def test_assign_joint_regression_targets(self): + def graph_fn(): + gt_boxes_list = [ + tf.constant( + np.array([[0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 1.0, 1.0]]), + dtype=tf.float32) + ] + gt_classes_list = [ + tf.one_hot([0, 1, 0, 1], depth=4), + ] + coordinates = tf.expand_dims( + tf.constant( + np.array([[0.1, 0.2, 0.3, 0.4, 0.5], + [float('nan'), 0.7, float('nan'), 0.9, 0.4], + [0.4, 0.1, 0.4, 0.2, 0.0], + [float('nan'), 0.0, 0.12, 0.7, 0.4]]), + dtype=tf.float32), + axis=2) + gt_keypoints_list = [tf.concat([coordinates, coordinates], axis=2)] + + cn_assigner = targetassigner.CenterNetKeypointTargetAssigner( + stride=4, + class_id=1, + keypoint_indices=[0, 2]) + (indices, offsets, weights) = cn_assigner.assign_joint_regression_targets( + height=120, + width=80, + gt_keypoints_list=gt_keypoints_list, + gt_classes_list=gt_classes_list, + gt_boxes_list=gt_boxes_list) + return indices, offsets, weights + indices, offsets, weights = self.execute(graph_fn, []) + np.testing.assert_array_almost_equal( + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0], weights) + np.testing.assert_array_equal([0, 15, 10, 1], indices[7, :]) + np.testing.assert_array_almost_equal([-11.4, -7.6], offsets[7, :]) + + def test_assign_joint_regression_targets_radius(self): + def graph_fn(): + gt_boxes_list = [ + tf.constant( + np.array([[0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 1.0, 1.0]]), + dtype=tf.float32) + ] + gt_classes_list = [ + tf.one_hot([0, 1, 0, 1], depth=4), + ] + coordinates = tf.expand_dims( + tf.constant( + np.array([[0.1, 0.2, 0.3, 0.4, 0.5], + [float('nan'), 0.7, float('nan'), 0.9, 0.4], + [0.4, 0.1, 0.4, 0.2, 0.0], + [float('nan'), 0.0, 0.12, 0.7, 0.4]]), + dtype=tf.float32), + axis=2) + gt_keypoints_list = [tf.concat([coordinates, coordinates], axis=2)] + + cn_assigner = targetassigner.CenterNetKeypointTargetAssigner( + stride=4, + class_id=1, + keypoint_indices=[0, 2], + peak_radius=1) + (indices, offsets, weights) = cn_assigner.assign_joint_regression_targets( + height=120, + width=80, + gt_keypoints_list=gt_keypoints_list, + gt_classes_list=gt_classes_list, + gt_boxes_list=gt_boxes_list) + return indices, offsets, weights + indices, offsets, weights = self.execute(graph_fn, []) + + # There are total 8 * 5 (neighbors) = 40 targets. + self.assertAllEqual(indices.shape, [40, 4]) + self.assertAllEqual(offsets.shape, [40, 2]) + self.assertAllEqual(weights.shape, [40]) + # Only the last 5 (radius 1 generates 5 valid points) element has positive + # weight. + np.testing.assert_array_almost_equal([ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0 + ], weights) + # Test the values of the indices and offsets of the last 5 elements. 
+ np.testing.assert_array_equal([0, 14, 10, 1], indices[35, :]) + np.testing.assert_array_equal([0, 15, 9, 1], indices[36, :]) + np.testing.assert_array_equal([0, 15, 10, 1], indices[37, :]) + np.testing.assert_array_equal([0, 15, 11, 1], indices[38, :]) + np.testing.assert_array_equal([0, 16, 10, 1], indices[39, :]) + np.testing.assert_array_almost_equal([-10.4, -7.6], offsets[35, :]) + np.testing.assert_array_almost_equal([-11.4, -6.6], offsets[36, :]) + np.testing.assert_array_almost_equal([-11.4, -7.6], offsets[37, :]) + np.testing.assert_array_almost_equal([-11.4, -8.6], offsets[38, :]) + np.testing.assert_array_almost_equal([-12.4, -7.6], offsets[39, :]) + + +class CenterNetMaskTargetAssignerTest(test_case.TestCase): + + def test_assign_segmentation_targets(self): + def graph_fn(): + gt_masks_list = [ + # Example 0. + tf.constant([ + [ + [1., 0., 0., 0.], + [1., 1., 0., 0.], + [0., 0., 0., 0.], + [0., 0., 0., 0.], + ], + [ + [0., 0., 0., 0.], + [0., 0., 0., 1.], + [0., 0., 0., 0.], + [0., 0., 0., 0.], + ], + [ + [1., 1., 0., 0.], + [1., 1., 0., 0.], + [0., 0., 1., 1.], + [0., 0., 1., 1.], + ] + ], dtype=tf.float32), + # Example 1. + tf.constant([ + [ + [1., 1., 0., 1.], + [1., 1., 1., 1.], + [0., 0., 1., 1.], + [0., 0., 0., 1.], + ], + [ + [0., 0., 0., 0.], + [0., 0., 0., 0.], + [1., 1., 0., 0.], + [1., 1., 0., 0.], + ], + ], dtype=tf.float32), + ] + gt_classes_list = [ + # Example 0. + tf.constant([[1., 0., 0.], + [0., 1., 0.], + [1., 0., 0.]], dtype=tf.float32), + # Example 1. + tf.constant([[0., 1., 0.], + [0., 1., 0.]], dtype=tf.float32) + ] + cn_assigner = targetassigner.CenterNetMaskTargetAssigner(stride=2) + segmentation_target = cn_assigner.assign_segmentation_targets( + gt_masks_list=gt_masks_list, + gt_classes_list=gt_classes_list, + mask_resize_method=targetassigner.ResizeMethod.NEAREST_NEIGHBOR) + return segmentation_target + segmentation_target = self.execute(graph_fn, []) + + expected_seg_target = np.array([ + # Example 0 [[class 0, class 1], [background, class 0]] + [[[1, 0, 0], [0, 1, 0]], + [[0, 0, 0], [1, 0, 0]]], + # Example 1 [[class 1, class 1], [class 1, class 1]] + [[[0, 1, 0], [0, 1, 0]], + [[0, 1, 0], [0, 1, 0]]], + ], dtype=np.float32) + np.testing.assert_array_almost_equal( + expected_seg_target, segmentation_target) + if __name__ == '__main__': tf.enable_v2_behavior() diff --git a/research/object_detection/dataset_tools/context_rcnn/__init__.py b/research/object_detection/dataset_tools/context_rcnn/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/research/object_detection/dataset_tools/context_rcnn/add_context_to_examples.py b/research/object_detection/dataset_tools/context_rcnn/add_context_to_examples.py new file mode 100644 index 000000000..9c0538798 --- /dev/null +++ b/research/object_detection/dataset_tools/context_rcnn/add_context_to_examples.py @@ -0,0 +1,845 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ==============================================================================
+r"""A Beam job to add contextual memory banks to tf.Examples.
+
+This tool groups images containing bounding boxes and embedded context features
+by a key, either `image/location` or `image/seq_id`, and by time horizon. It
+then uses these groups to build up a contextual memory bank from the embedded
+context features of each image in the group and adds that context to the
+output tf.Examples for each image in the group.
+
+Steps to generate a dataset with context from one with bounding boxes and
+embedded context features:
+1. Use object_detection/export_inference_graph.py to get a `saved_model` for
+   inference. The input node must accept a tf.Example proto.
+2. Run this tool with `saved_model` from step 1 and a TFRecord of tf.Example
+   protos containing images, bounding boxes, and embedded context features.
+   The context features can be added to tf.Examples using
+   generate_embedding_data.py.
+
+Example Usage:
+--------------
+python add_context_to_examples.py \
+  --input_tfrecord path/to/input_tfrecords* \
+  --output_tfrecord path/to/output_tfrecords \
+  --sequence_key image/location \
+  --time_horizon month
+
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import copy
+import datetime
+import io
+import itertools
+import json
+import os
+
+from absl import app
+from absl import flags
+import apache_beam as beam
+import numpy as np
+import PIL.Image
+import six
+import tensorflow as tf
+
+from apache_beam import runners
+
+
+flags.DEFINE_string('input_tfrecord', None, 'TFRecord containing images in '
+                    'tf.Example format for object detection, with bounding '
+                    'boxes and contextual feature embeddings.')
+flags.DEFINE_string('output_tfrecord', None,
+                    'TFRecord containing images in tf.Example format, with '
+                    'added contextual memory banks.')
+flags.DEFINE_string('sequence_key', None, 'Key to use when grouping sequences: '
+                    'so far supports `image/seq_id` and `image/location`.')
+flags.DEFINE_string('time_horizon', None, 'What time horizon to use when '
+                    'splitting the data, if any. Options are: `year`, `month`,'
+                    ' `week`, `day`, `hour`, `minute`, `None`.')
+flags.DEFINE_integer('subsample_context_features_rate', 0, 'Rate at which to '
+                     'subsample the context features: if set to X, context is '
+                     'sampled from 1 out of every X images. The default of 0 '
+                     'samples context from every image.')
+flags.DEFINE_boolean('reduce_image_size', True, 'Downsamples images so that '
+                     'the longest side is max_image_dimension, maintaining '
+                     'the aspect ratio.')
+flags.DEFINE_integer('max_image_dimension', 1024, 'Sets max image dimension.')
+flags.DEFINE_boolean('add_context_features', True, 'Adds a memory bank of '
+                     'embeddings to each clip.')
+flags.DEFINE_boolean('sorted_image_ids', True, 'Whether the image source_ids '
+                     'are sortable to deal with date_captured tie-breaks.')
+flags.DEFINE_string('image_ids_to_keep', 'All', 'Path to a .json list of image '
+                    'ids to keep, used for ground truth eval creation.')
+flags.DEFINE_boolean('keep_context_features_image_id_list', False, 'Whether or '
+                     'not to keep a list of the image_ids corresponding to the '
+                     'memory bank.')
+flags.DEFINE_boolean('keep_only_positives', False, 'Whether or not to '
+                     'keep only positive boxes based on score.')
+flags.DEFINE_boolean('keep_only_positives_gt', False, 'Whether or not to '
+                     'keep only positive boxes based on gt class.')
+flags.DEFINE_float('context_features_score_threshold', 0.7, 'What score '
+                   'threshold to use for boxes in context_features.')
+flags.DEFINE_integer('max_num_elements_in_context_features', 2000, 'Sets max '
+                     'num elements per memory bank.')
+flags.DEFINE_integer('num_shards', 0, 'Number of output shards.')
+flags.DEFINE_string('output_type', 'tf_sequence_example', 'Output type, one of '
+                    '`tf_example`, `tf_sequence_example`.')
+flags.DEFINE_integer('max_clip_length', None, 'Max length for sequence '
+                     'example outputs.')
+
+FLAGS = flags.FLAGS
+
+DEFAULT_FEATURE_LENGTH = 2057
+
+
+class ReKeyDataFn(beam.DoFn):
+  """Re-keys tfrecords by sequence_key.
+
+  This Beam DoFn re-keys the tfrecords by a user-defined sequence_key.
+  """
+
+  def __init__(self, sequence_key, time_horizon,
+               reduce_image_size, max_image_dimension):
+    """Initialization function.
+
+    Args:
+      sequence_key: A feature name to use as a key for grouping sequences.
+        Must point to a key of type bytes_list.
+      time_horizon: What length of time to use to partition the data when
+        building the memory banks. Options: `year`, `month`, `week`, `day`,
+        `hour`, `minute`, None.
+      reduce_image_size: Whether to reduce the sizes of the stored images.
+      max_image_dimension: Maximum dimension of reduced images.
+    """
+    self._sequence_key = sequence_key
+    if time_horizon is None or time_horizon in {'year', 'month', 'week', 'day',
+                                                'hour', 'minute'}:
+      self._time_horizon = time_horizon
+    else:
+      raise ValueError('Time horizon not supported.')
+    self._reduce_image_size = reduce_image_size
+    self._max_image_dimension = max_image_dimension
+    self._session = None
+    self._num_examples_processed = beam.metrics.Metrics.counter(
+        'data_rekey', 'num_tf_examples_processed')
+    self._num_images_resized = beam.metrics.Metrics.counter(
+        'data_rekey', 'num_images_resized')
+    self._num_images_read = beam.metrics.Metrics.counter(
+        'data_rekey', 'num_images_read')
+    # Use a distinct counter name so the `found` metric does not alias `read`.
+    self._num_images_found = beam.metrics.Metrics.counter(
+        'data_rekey', 'num_images_found')
+    self._num_got_shape = beam.metrics.Metrics.counter(
+        'data_rekey', 'num_images_got_shape')
+    self._num_images_found_size = beam.metrics.Metrics.counter(
+        'data_rekey', 'num_images_found_size')
+    self._num_examples_cleared = beam.metrics.Metrics.counter(
+        'data_rekey', 'num_examples_cleared')
+    self._num_examples_updated = beam.metrics.Metrics.counter(
+        'data_rekey', 'num_examples_updated')
+
+  def process(self, tfrecord_entry):
+    return self._rekey_examples(tfrecord_entry)
+
+  def _largest_size_at_most(self, height, width, largest_side):
+    """Computes new shape with the largest side equal to `largest_side`.
+
+    Args:
+      height: an int indicating the current height.
+      width: an int indicating the current width.
+      largest_side: A python integer indicating the size of
+        the largest side after resize.
+    Returns:
+      new_height: an int indicating the new height.
+      new_width: an int indicating the new width.
+    """
+
+    x_scale = float(largest_side) / float(width)
+    y_scale = float(largest_side) / float(height)
+    scale = min(x_scale, y_scale)
+
+    new_width = int(width * scale)
+    new_height = int(height * scale)
+
+    return new_height, new_width
+
+  def _resize_image(self, input_example):
+    """Resizes the image within input_example and updates the height and width.
+
+    Args:
+      input_example: A tf.Example that we want to update to contain a resized
+        image.
+    Returns:
+      input_example: Updated tf.Example.
+ """ + + original_image = copy.deepcopy( + input_example.features.feature['image/encoded'].bytes_list.value[0]) + self._num_images_read.inc(1) + + height = copy.deepcopy( + input_example.features.feature['image/height'].int64_list.value[0]) + + width = copy.deepcopy( + input_example.features.feature['image/width'].int64_list.value[0]) + + self._num_got_shape.inc(1) + + new_height, new_width = self._largest_size_at_most( + height, width, self._max_image_dimension) + + self._num_images_found_size.inc(1) + + encoded_jpg_io = io.BytesIO(original_image) + image = PIL.Image.open(encoded_jpg_io) + resized_image = image.resize((new_width, new_height)) + + with io.BytesIO() as output: + resized_image.save(output, format='JPEG') + encoded_resized_image = output.getvalue() + + self._num_images_resized.inc(1) + + del input_example.features.feature['image/encoded'].bytes_list.value[:] + del input_example.features.feature['image/height'].int64_list.value[:] + del input_example.features.feature['image/width'].int64_list.value[:] + + self._num_examples_cleared.inc(1) + + input_example.features.feature['image/encoded'].bytes_list.value.extend( + [encoded_resized_image]) + input_example.features.feature['image/height'].int64_list.value.extend( + [new_height]) + input_example.features.feature['image/width'].int64_list.value.extend( + [new_width]) + self._num_examples_updated.inc(1) + + return input_example + + def _rekey_examples(self, tfrecord_entry): + serialized_example = copy.deepcopy(tfrecord_entry) + + input_example = tf.train.Example.FromString(serialized_example) + + self._num_images_found.inc(1) + + if self._reduce_image_size: + input_example = self._resize_image(input_example) + self._num_images_resized.inc(1) + + new_key = input_example.features.feature[ + self._sequence_key].bytes_list.value[0] + + if self._time_horizon: + date_captured = datetime.datetime.strptime( + six.ensure_str(input_example.features.feature[ + 'image/date_captured'].bytes_list.value[0]), '%Y-%m-%d %H:%M:%S') + year = date_captured.year + month = date_captured.month + day = date_captured.day + week = np.floor(float(day) / float(7)) + hour = date_captured.hour + minute = date_captured.minute + + if self._time_horizon == 'year': + new_key = new_key + six.ensure_binary('/' + str(year)) + elif self._time_horizon == 'month': + new_key = new_key + six.ensure_binary( + '/' + str(year) + '/' + str(month)) + elif self._time_horizon == 'week': + new_key = new_key + six.ensure_binary( + '/' + str(year) + '/' + str(month) + '/' + str(week)) + elif self._time_horizon == 'day': + new_key = new_key + six.ensure_binary( + '/' + str(year) + '/' + str(month) + '/' + str(day)) + elif self._time_horizon == 'hour': + new_key = new_key + six.ensure_binary( + '/' + str(year) + '/' + str(month) + '/' + str(day) + '/' + ( + str(hour))) + elif self._time_horizon == 'minute': + new_key = new_key + six.ensure_binary( + '/' + str(year) + '/' + str(month) + '/' + str(day) + '/' + ( + str(hour) + '/' + str(minute))) + + self._num_examples_processed.inc(1) + + return [(new_key, input_example)] + + +class SortGroupedDataFn(beam.DoFn): + """Sorts data within a keyed group. + + This Beam DoFn sorts the grouped list of image examples by frame_num + """ + + def __init__(self, sequence_key, sorted_image_ids, + max_num_elements_in_context_features): + """Initialization function. + + Args: + sequence_key: A feature name to use as a key for grouping sequences. 
+        Must point to a key of type bytes_list.
+      sorted_image_ids: Whether the image ids are sortable to use as sorting
+        tie-breakers.
+      max_num_elements_in_context_features: The maximum number of elements
+        allowed in the memory bank.
+    """
+    self._session = None
+    self._num_examples_processed = beam.metrics.Metrics.counter(
+        'sort_group', 'num_groups_sorted')
+    self._too_many_elements = beam.metrics.Metrics.counter(
+        'sort_group', 'too_many_elements')
+    self._split_elements = beam.metrics.Metrics.counter(
+        'sort_group', 'split_elements')
+    self._sequence_key = six.ensure_binary(sequence_key)
+    self._sorted_image_ids = sorted_image_ids
+    self._max_num_elements_in_context_features = (
+        max_num_elements_in_context_features)
+
+  def process(self, grouped_entry):
+    return self._sort_image_examples(grouped_entry)
+
+  def _sort_image_examples(self, grouped_entry):
+    key, example_collection = grouped_entry
+    example_list = list(example_collection)
+
+    def get_frame_num(example):
+      return example.features.feature['image/seq_frame_num'].int64_list.value[0]
+
+    def get_date_captured(example):
+      return datetime.datetime.strptime(
+          six.ensure_str(
+              example.features.feature[
+                  'image/date_captured'].bytes_list.value[0]),
+          '%Y-%m-%d %H:%M:%S')
+
+    def get_image_id(example):
+      return example.features.feature['image/source_id'].bytes_list.value[0]
+
+    if self._sequence_key == six.ensure_binary('image/seq_id'):
+      sorting_fn = get_frame_num
+    elif self._sequence_key == six.ensure_binary('image/location'):
+      if self._sorted_image_ids:
+        sorting_fn = get_image_id
+      else:
+        sorting_fn = get_date_captured
+
+    sorted_example_list = sorted(example_list, key=sorting_fn)
+
+    self._num_examples_processed.inc(1)
+
+    if len(sorted_example_list) > self._max_num_elements_in_context_features:
+      leftovers = sorted_example_list
+      output_list = []
+      count = 0
+      self._too_many_elements.inc(1)
+      while len(leftovers) > self._max_num_elements_in_context_features:
+        self._split_elements.inc(1)
+        new_key = key + six.ensure_binary('_' + str(count))
+        new_list = leftovers[:self._max_num_elements_in_context_features]
+        output_list.append((new_key, new_list))
+        # Drop the examples just emitted and keep splitting the remainder.
+        leftovers = leftovers[self._max_num_elements_in_context_features:]
+        count += 1
+      # Emit the final partial group so no examples are silently dropped.
+      if leftovers:
+        new_key = key + six.ensure_binary('_' + str(count))
+        output_list.append((new_key, leftovers))
+    else:
+      output_list = [(key, sorted_example_list)]
+
+    return output_list
+
+
+def get_sliding_window(example_list, max_clip_length, stride_length):
+  """Yields a sliding window over data from example_list.
+
+  Sliding window has width max_clip_length (n) and stride stride_length (m).
+  s -> (s0,s1,...s[n-1]), (s[m],s[m+1],...,s[m+n]), ...
+
+  Args:
+    example_list: A list of examples.
+    max_clip_length: The maximum length of each clip.
+    stride_length: The stride between each clip.
+
+  Yields:
+    A list or tuple of examples per window, each with length <= max_clip_length.
+  """
+
+  # Check whether the list is too short to slide over.
+  if len(example_list) < max_clip_length:
+    yield example_list
+  else:
+    starting_values = [i*stride_length for i in
+                       range(len(example_list)) if
+                       len(example_list) > i*stride_length]
+    for start in starting_values:
+      result = tuple(itertools.islice(example_list, start,
+                                      min(start + max_clip_length,
+                                          len(example_list))))
+      yield result
+
+
+class GenerateContextFn(beam.DoFn):
+  """Generates context data for camera trap images.
+
+  This Beam DoFn builds up contextual memory banks from groups of images and
+  stores them in the output tf.Example or tf.SequenceExample for each image.
+ """ + + def __init__(self, sequence_key, add_context_features, image_ids_to_keep, + keep_context_features_image_id_list=False, + subsample_context_features_rate=0, + keep_only_positives=False, + context_features_score_threshold=0.7, + keep_only_positives_gt=False, + max_num_elements_in_context_features=5000, + pad_context_features=False, + output_type='tf_example', max_clip_length=None): + """Initialization function. + + Args: + sequence_key: A feature name to use as a key for grouping sequences. + add_context_features: Whether to keep and store the contextual memory + bank. + image_ids_to_keep: A list of image ids to save, to use to build data + subsets for evaluation. + keep_context_features_image_id_list: Whether to save an ordered list of + the ids of the images in the contextual memory bank. + subsample_context_features_rate: What rate to subsample images for the + contextual memory bank. + keep_only_positives: Whether to only keep high scoring + (>context_features_score_threshold) features in the contextual memory + bank. + context_features_score_threshold: What threshold to use for keeping + features. + keep_only_positives_gt: Whether to only keep features from images that + contain objects based on the ground truth (for training). + max_num_elements_in_context_features: the maximum number of elements in + the memory bank + pad_context_features: Whether to pad the context features to a fixed size. + output_type: What type of output, tf_example of tf_sequence_example + max_clip_length: The maximum length of a sequence example, before + splitting into multiple + """ + self._session = None + self._num_examples_processed = beam.metrics.Metrics.counter( + 'sequence_data_generation', 'num_seq_examples_processed') + self._num_keys_processed = beam.metrics.Metrics.counter( + 'sequence_data_generation', 'num_keys_processed') + self._sequence_key = sequence_key + self._add_context_features = add_context_features + self._pad_context_features = pad_context_features + self._output_type = output_type + self._max_clip_length = max_clip_length + if six.ensure_str(image_ids_to_keep) == 'All': + self._image_ids_to_keep = None + else: + with tf.io.gfile.GFile(image_ids_to_keep) as f: + self._image_ids_to_keep = json.load(f) + self._keep_context_features_image_id_list = ( + keep_context_features_image_id_list) + self._subsample_context_features_rate = subsample_context_features_rate + self._keep_only_positives = keep_only_positives + self._keep_only_positives_gt = keep_only_positives_gt + self._context_features_score_threshold = context_features_score_threshold + self._max_num_elements_in_context_features = ( + max_num_elements_in_context_features) + + self._images_kept = beam.metrics.Metrics.counter( + 'sequence_data_generation', 'images_kept') + self._images_loaded = beam.metrics.Metrics.counter( + 'sequence_data_generation', 'images_loaded') + + def process(self, grouped_entry): + return self._add_context_to_example(copy.deepcopy(grouped_entry)) + + def _build_context_features(self, example_list): + context_features = [] + context_features_image_id_list = [] + count = 0 + example_embedding = [] + + for idx, example in enumerate(example_list): + if self._subsample_context_features_rate > 0: + if (idx % self._subsample_context_features_rate) != 0: + example.features.feature[ + 'context_features_idx'].int64_list.value.append( + self._max_num_elements_in_context_features + 1) + continue + if self._keep_only_positives: + if example.features.feature[ + 'image/embedding_score' + ].float_list.value[0] < 
self._context_features_score_threshold: + example.features.feature[ + 'context_features_idx'].int64_list.value.append( + self._max_num_elements_in_context_features + 1) + continue + if self._keep_only_positives_gt: + if len(example.features.feature[ + 'image/object/bbox/xmin' + ].float_list.value) < 1: + example.features.feature[ + 'context_features_idx'].int64_list.value.append( + self._max_num_elements_in_context_features + 1) + continue + + example_embedding = list(example.features.feature[ + 'image/embedding'].float_list.value) + context_features.extend(example_embedding) + example.features.feature[ + 'context_features_idx'].int64_list.value.append(count) + count += 1 + example_image_id = example.features.feature[ + 'image/source_id'].bytes_list.value[0] + context_features_image_id_list.append(example_image_id) + + if not example_embedding: + example_embedding.append(np.zeros(DEFAULT_FEATURE_LENGTH)) + + feature_length = DEFAULT_FEATURE_LENGTH + + # If the example_list is not empty and image/embedding_length is in the + # featture dict, feature_length will be assigned to that. Otherwise, it will + # be kept as default. + if example_list and ( + 'image/embedding_length' in example_list[0].features.feature): + feature_length = example_list[0].features.feature[ + 'image/embedding_length'].int64_list.value[0] + + if self._pad_context_features: + while len(context_features_image_id_list) < ( + self._max_num_elements_in_context_features): + context_features_image_id_list.append('') + + return context_features, feature_length, context_features_image_id_list + + def _add_context_to_example(self, grouped_entry): + key, example_collection = grouped_entry + list_of_examples = [] + + example_list = list(example_collection) + + if self._add_context_features: + context_features, feature_length, context_features_image_id_list = ( + self._build_context_features(example_list)) + + if self._image_ids_to_keep is not None: + new_example_list = [] + for example in example_list: + im_id = example.features.feature['image/source_id'].bytes_list.value[0] + self._images_loaded.inc(1) + if six.ensure_str(im_id) in self._image_ids_to_keep: + self._images_kept.inc(1) + new_example_list.append(example) + if new_example_list: + example_list = new_example_list + else: + return [] + + if self._output_type == 'tf_sequence_example': + if self._max_clip_length is not None: + # For now, no overlap + clips = get_sliding_window( + example_list, self._max_clip_length, self._max_clip_length) + else: + clips = [example_list] + + for clip_num, clip_list in enumerate(clips): + # initialize sequence example + seq_example = tf.train.SequenceExample() + video_id = six.ensure_str(key)+'_'+ str(clip_num) + seq_example.context.feature['clip/media_id'].bytes_list.value.append( + video_id.encode('utf8')) + seq_example.context.feature['clip/frames'].int64_list.value.append( + len(clip_list)) + + seq_example.context.feature[ + 'clip/start/timestamp'].int64_list.value.append(0) + seq_example.context.feature[ + 'clip/end/timestamp'].int64_list.value.append(len(clip_list)) + seq_example.context.feature['image/format'].bytes_list.value.append( + six.ensure_binary('JPG')) + seq_example.context.feature['image/channels'].int64_list.value.append(3) + context_example = clip_list[0] + seq_example.context.feature['image/height'].int64_list.value.append( + context_example.features.feature[ + 'image/height'].int64_list.value[0]) + seq_example.context.feature['image/width'].int64_list.value.append( + 
context_example.features.feature['image/width'].int64_list.value[0]) + + seq_example.context.feature[ + 'image/context_feature_length'].int64_list.value.append( + feature_length) + seq_example.context.feature[ + 'image/context_features'].float_list.value.extend( + context_features) + if self._keep_context_features_image_id_list: + seq_example.context.feature[ + 'image/context_features_image_id_list'].bytes_list.value.extend( + context_features_image_id_list) + + encoded_image_list = seq_example.feature_lists.feature_list[ + 'image/encoded'] + timestamps_list = seq_example.feature_lists.feature_list[ + 'image/timestamp'] + context_features_idx_list = seq_example.feature_lists.feature_list[ + 'image/context_features_idx'] + date_captured_list = seq_example.feature_lists.feature_list[ + 'image/date_captured'] + unix_time_list = seq_example.feature_lists.feature_list[ + 'image/unix_time'] + location_list = seq_example.feature_lists.feature_list['image/location'] + image_ids_list = seq_example.feature_lists.feature_list[ + 'image/source_id'] + gt_xmin_list = seq_example.feature_lists.feature_list[ + 'region/bbox/xmin'] + gt_xmax_list = seq_example.feature_lists.feature_list[ + 'region/bbox/xmax'] + gt_ymin_list = seq_example.feature_lists.feature_list[ + 'region/bbox/ymin'] + gt_ymax_list = seq_example.feature_lists.feature_list[ + 'region/bbox/ymax'] + gt_type_list = seq_example.feature_lists.feature_list[ + 'region/label/index'] + gt_type_string_list = seq_example.feature_lists.feature_list[ + 'region/label/string'] + gt_is_annotated_list = seq_example.feature_lists.feature_list[ + 'region/is_annotated'] + + for idx, example in enumerate(clip_list): + + encoded_image = encoded_image_list.feature.add() + encoded_image.bytes_list.value.extend( + example.features.feature['image/encoded'].bytes_list.value) + + image_id = image_ids_list.feature.add() + image_id.bytes_list.value.append( + example.features.feature['image/source_id'].bytes_list.value[0]) + + timestamp = timestamps_list.feature.add() + # Timestamp is currently order in the list. 
+ timestamp.int64_list.value.extend([idx]) + + context_features_idx = context_features_idx_list.feature.add() + context_features_idx.int64_list.value.extend( + example.features.feature['context_features_idx'].int64_list.value) + + date_captured = date_captured_list.feature.add() + date_captured.bytes_list.value.extend( + example.features.feature['image/date_captured'].bytes_list.value) + unix_time = unix_time_list.feature.add() + unix_time.float_list.value.extend( + example.features.feature['image/unix_time'].float_list.value) + location = location_list.feature.add() + location.bytes_list.value.extend( + example.features.feature['image/location'].bytes_list.value) + + gt_xmin = gt_xmin_list.feature.add() + gt_xmax = gt_xmax_list.feature.add() + gt_ymin = gt_ymin_list.feature.add() + gt_ymax = gt_ymax_list.feature.add() + gt_type = gt_type_list.feature.add() + gt_type_str = gt_type_string_list.feature.add() + + gt_is_annotated = gt_is_annotated_list.feature.add() + gt_is_annotated.int64_list.value.append(1) + + gt_xmin.float_list.value.extend( + example.features.feature[ + 'image/object/bbox/xmin'].float_list.value) + gt_xmax.float_list.value.extend( + example.features.feature[ + 'image/object/bbox/xmax'].float_list.value) + gt_ymin.float_list.value.extend( + example.features.feature[ + 'image/object/bbox/ymin'].float_list.value) + gt_ymax.float_list.value.extend( + example.features.feature[ + 'image/object/bbox/ymax'].float_list.value) + + gt_type.int64_list.value.extend( + example.features.feature[ + 'image/object/class/label'].int64_list.value) + gt_type_str.bytes_list.value.extend( + example.features.feature[ + 'image/object/class/text'].bytes_list.value) + + self._num_examples_processed.inc(1) + list_of_examples.append(seq_example) + + elif self._output_type == 'tf_example': + + for example in example_list: + im_id = example.features.feature['image/source_id'].bytes_list.value[0] + + if self._add_context_features: + example.features.feature[ + 'image/context_features'].float_list.value.extend( + context_features) + example.features.feature[ + 'image/context_feature_length'].int64_list.value.append( + feature_length) + + if self._keep_context_features_image_id_list: + example.features.feature[ + 'image/context_features_image_id_list'].bytes_list.value.extend( + context_features_image_id_list) + + self._num_examples_processed.inc(1) + list_of_examples.append(example) + + return list_of_examples + + +def construct_pipeline(input_tfrecord, + output_tfrecord, + sequence_key, + time_horizon=None, + subsample_context_features_rate=0, + reduce_image_size=True, + max_image_dimension=1024, + add_context_features=True, + sorted_image_ids=True, + image_ids_to_keep='All', + keep_context_features_image_id_list=False, + keep_only_positives=False, + context_features_score_threshold=0.7, + keep_only_positives_gt=False, + max_num_elements_in_context_features=5000, + num_shards=0, + output_type='tf_example', + max_clip_length=None): + """Returns a beam pipeline to run object detection inference. + + Args: + input_tfrecord: An TFRecord of tf.train.Example protos containing images. + output_tfrecord: An TFRecord of tf.train.Example protos that contain images + in the input TFRecord and the detections from the model. + sequence_key: A feature name to use as a key for grouping sequences. + time_horizon: What length of time to use to partition the data when building + the memory banks. Options: `year`, `month`, `week`, `day `, `hour`, + `minute`, None. 
+ subsample_context_features_rate: What rate to subsample images for the + contextual memory bank. + reduce_image_size: Whether to reduce the size of the stored images. + max_image_dimension: The maximum image dimension to use for resizing. + add_context_features: Whether to keep and store the contextual memory bank. + sorted_image_ids: Whether the image ids are sortable, and can be used as + datetime tie-breakers when building memory banks. + image_ids_to_keep: A list of image ids to save, to use to build data subsets + for evaluation. + keep_context_features_image_id_list: Whether to save an ordered list of the + ids of the images in the contextual memory bank. + keep_only_positives: Whether to only keep high scoring + (>context_features_score_threshold) features in the contextual memory + bank. + context_features_score_threshold: What threshold to use for keeping + features. + keep_only_positives_gt: Whether to only keep features from images that + contain objects based on the ground truth (for training). + max_num_elements_in_context_features: the maximum number of elements in the + memory bank + num_shards: The number of output shards. + output_type: What type of output, tf_example of tf_sequence_example + max_clip_length: The maximum length of a sequence example, before + splitting into multiple + """ + def pipeline(root): + if output_type == 'tf_example': + coder = beam.coders.ProtoCoder(tf.train.Example) + elif output_type == 'tf_sequence_example': + coder = beam.coders.ProtoCoder(tf.train.SequenceExample) + else: + raise ValueError('Unsupported output type.') + input_collection = ( + root | 'ReadInputTFRecord' >> beam.io.tfrecordio.ReadFromTFRecord( + input_tfrecord, + coder=beam.coders.BytesCoder())) + rekey_collection = input_collection | 'RekeyExamples' >> beam.ParDo( + ReKeyDataFn(sequence_key, time_horizon, + reduce_image_size, max_image_dimension)) + grouped_collection = ( + rekey_collection | 'GroupBySequenceKey' >> beam.GroupByKey()) + grouped_collection = ( + grouped_collection | 'ReshuffleGroups' >> beam.Reshuffle()) + ordered_collection = ( + grouped_collection | 'OrderByFrameNumber' >> beam.ParDo( + SortGroupedDataFn(sequence_key, sorted_image_ids, + max_num_elements_in_context_features))) + ordered_collection = ( + ordered_collection | 'ReshuffleSortedGroups' >> beam.Reshuffle()) + output_collection = ( + ordered_collection | 'AddContextToExamples' >> beam.ParDo( + GenerateContextFn( + sequence_key, add_context_features, image_ids_to_keep, + keep_context_features_image_id_list=( + keep_context_features_image_id_list), + subsample_context_features_rate=subsample_context_features_rate, + keep_only_positives=keep_only_positives, + keep_only_positives_gt=keep_only_positives_gt, + context_features_score_threshold=( + context_features_score_threshold), + max_num_elements_in_context_features=( + max_num_elements_in_context_features), + output_type=output_type, + max_clip_length=max_clip_length))) + + output_collection = ( + output_collection | 'ReshuffleExamples' >> beam.Reshuffle()) + _ = output_collection | 'WritetoDisk' >> beam.io.tfrecordio.WriteToTFRecord( + output_tfrecord, + num_shards=num_shards, + coder=coder) + return pipeline + + +def main(_): + """Runs the Beam pipeline that builds context features. 
+ + Args: + _: unused + """ + # must create before flags are used + runner = runners.DirectRunner() + + dirname = os.path.dirname(FLAGS.output_tfrecord) + tf.io.gfile.makedirs(dirname) + runner.run( + construct_pipeline(FLAGS.input_tfrecord, + FLAGS.output_tfrecord, + FLAGS.sequence_key, + FLAGS.time_horizon, + FLAGS.subsample_context_features_rate, + FLAGS.reduce_image_size, + FLAGS.max_image_dimension, + FLAGS.add_context_features, + FLAGS.sorted_image_ids, + FLAGS.image_ids_to_keep, + FLAGS.keep_context_features_image_id_list, + FLAGS.keep_only_positives, + FLAGS.context_features_score_threshold, + FLAGS.keep_only_positives_gt, + FLAGS.max_num_elements_in_context_features, + FLAGS.num_shards, + FLAGS.output_type, + FLAGS.max_clip_length)) + + +if __name__ == '__main__': + flags.mark_flags_as_required([ + 'input_tfrecord', + 'output_tfrecord' + ]) + app.run(main) diff --git a/research/object_detection/dataset_tools/context_rcnn/add_context_to_examples_tf1_test.py b/research/object_detection/dataset_tools/context_rcnn/add_context_to_examples_tf1_test.py new file mode 100644 index 000000000..99bb47979 --- /dev/null +++ b/research/object_detection/dataset_tools/context_rcnn/add_context_to_examples_tf1_test.py @@ -0,0 +1,384 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Tests for add_context_to_examples.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +import contextlib +import datetime +import os +import tempfile +import unittest +import numpy as np +import six +import tensorflow.compat.v1 as tf + +from object_detection.dataset_tools.context_rcnn import add_context_to_examples +from object_detection.utils import tf_version +from apache_beam import runners + + +@contextlib.contextmanager +def InMemoryTFRecord(entries): + temp = tempfile.NamedTemporaryFile(delete=False) + filename = temp.name + try: + with tf.python_io.TFRecordWriter(filename) as writer: + for value in entries: + writer.write(value) + yield filename + finally: + os.unlink(temp.name) + + +def BytesFeature(value): + return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value])) + + +def BytesListFeature(value): + return tf.train.Feature(bytes_list=tf.train.BytesList(value=value)) + + +def Int64Feature(value): + return tf.train.Feature(int64_list=tf.train.Int64List(value=[value])) + + +def Int64ListFeature(value): + return tf.train.Feature(int64_list=tf.train.Int64List(value=value)) + + +def FloatListFeature(value): + return tf.train.Feature(float_list=tf.train.FloatList(value=value)) + + +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') +class GenerateContextDataTest(tf.test.TestCase): + + def _create_first_tf_example(self): + with self.test_session(): + encoded_image = tf.image.encode_jpeg( + tf.constant(np.ones((4, 4, 3)).astype(np.uint8))).eval() + + example = tf.train.Example(features=tf.train.Features(feature={ + 'image/encoded': BytesFeature(encoded_image), + 'image/source_id': BytesFeature(six.ensure_binary('image_id_1')), + 'image/height': Int64Feature(4), + 'image/width': Int64Feature(4), + 'image/object/class/label': Int64ListFeature([5, 5]), + 'image/object/class/text': BytesListFeature([six.ensure_binary('hyena'), + six.ensure_binary('hyena') + ]), + 'image/object/bbox/xmin': FloatListFeature([0.0, 0.1]), + 'image/object/bbox/xmax': FloatListFeature([0.2, 0.3]), + 'image/object/bbox/ymin': FloatListFeature([0.4, 0.5]), + 'image/object/bbox/ymax': FloatListFeature([0.6, 0.7]), + 'image/seq_id': BytesFeature(six.ensure_binary('01')), + 'image/seq_num_frames': Int64Feature(2), + 'image/seq_frame_num': Int64Feature(0), + 'image/date_captured': BytesFeature( + six.ensure_binary(str(datetime.datetime(2020, 1, 1, 1, 0, 0)))), + 'image/embedding': FloatListFeature([0.1, 0.2, 0.3]), + 'image/embedding_score': FloatListFeature([0.9]), + 'image/embedding_length': Int64Feature(3) + + })) + + return example.SerializeToString() + + def _create_second_tf_example(self): + with self.test_session(): + encoded_image = tf.image.encode_jpeg( + tf.constant(np.ones((4, 4, 3)).astype(np.uint8))).eval() + + example = tf.train.Example(features=tf.train.Features(feature={ + 'image/encoded': BytesFeature(encoded_image), + 'image/source_id': BytesFeature(six.ensure_binary('image_id_2')), + 'image/height': Int64Feature(4), + 'image/width': Int64Feature(4), + 'image/object/class/label': Int64ListFeature([5]), + 'image/object/class/text': BytesListFeature([six.ensure_binary('hyena') + ]), + 'image/object/bbox/xmin': FloatListFeature([0.0]), + 'image/object/bbox/xmax': FloatListFeature([0.1]), + 'image/object/bbox/ymin': FloatListFeature([0.2]), + 'image/object/bbox/ymax': FloatListFeature([0.3]), + 'image/seq_id': 
BytesFeature(six.ensure_binary('01')), + 'image/seq_num_frames': Int64Feature(2), + 'image/seq_frame_num': Int64Feature(1), + 'image/date_captured': BytesFeature( + six.ensure_binary(str(datetime.datetime(2020, 1, 1, 1, 1, 0)))), + 'image/embedding': FloatListFeature([0.4, 0.5, 0.6]), + 'image/embedding_score': FloatListFeature([0.9]), + 'image/embedding_length': Int64Feature(3) + })) + + return example.SerializeToString() + + def assert_expected_examples(self, tf_example_list): + self.assertAllEqual( + {tf_example.features.feature['image/source_id'].bytes_list.value[0] + for tf_example in tf_example_list}, + {six.ensure_binary('image_id_1'), six.ensure_binary('image_id_2')}) + self.assertAllClose( + tf_example_list[0].features.feature[ + 'image/context_features'].float_list.value, + [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]) + self.assertAllClose( + tf_example_list[1].features.feature[ + 'image/context_features'].float_list.value, + [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]) + + def assert_expected_sequence_example(self, tf_sequence_example_list): + tf_sequence_example = tf_sequence_example_list[0] + num_frames = 2 + + self.assertAllEqual( + tf_sequence_example.context.feature[ + 'clip/media_id'].bytes_list.value[0], six.ensure_binary( + '01_0')) + self.assertAllClose( + tf_sequence_example.context.feature[ + 'image/context_features'].float_list.value, + [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]) + + seq_feature_dict = tf_sequence_example.feature_lists.feature_list + + self.assertLen( + seq_feature_dict['image/encoded'].feature[:], + num_frames) + actual_timestamps = [ + feature.int64_list.value[0] for feature + in seq_feature_dict['image/timestamp'].feature] + timestamps = [0, 1] + self.assertAllEqual(timestamps, actual_timestamps) + + # First image. + self.assertAllClose( + [0.4, 0.5], + seq_feature_dict['region/bbox/ymin'].feature[0].float_list.value[:]) + self.assertAllClose( + [0.0, 0.1], + seq_feature_dict['region/bbox/xmin'].feature[0].float_list.value[:]) + self.assertAllClose( + [0.6, 0.7], + seq_feature_dict['region/bbox/ymax'].feature[0].float_list.value[:]) + self.assertAllClose( + [0.2, 0.3], + seq_feature_dict['region/bbox/xmax'].feature[0].float_list.value[:]) + self.assertAllEqual( + [six.ensure_binary('hyena'), six.ensure_binary('hyena')], + seq_feature_dict['region/label/string'].feature[0].bytes_list.value[:]) + + # Second example. 
+ self.assertAllClose( + [0.2], + seq_feature_dict['region/bbox/ymin'].feature[1].float_list.value[:]) + self.assertAllClose( + [0.0], + seq_feature_dict['region/bbox/xmin'].feature[1].float_list.value[:]) + self.assertAllClose( + [0.3], + seq_feature_dict['region/bbox/ymax'].feature[1].float_list.value[:]) + self.assertAllClose( + [0.1], + seq_feature_dict['region/bbox/xmax'].feature[1].float_list.value[:]) + self.assertAllEqual( + [six.ensure_binary('hyena')], + seq_feature_dict['region/label/string'].feature[1].bytes_list.value[:]) + + def assert_expected_key(self, key): + self.assertAllEqual(key, '01') + + def assert_sorted(self, example_collection): + example_list = list(example_collection) + counter = 0 + for example in example_list: + frame_num = example.features.feature[ + 'image/seq_frame_num'].int64_list.value[0] + self.assertGreaterEqual(frame_num, counter) + counter = frame_num + + def assert_context(self, example_collection): + example_list = list(example_collection) + for example in example_list: + context = example.features.feature[ + 'image/context_features'].float_list.value + self.assertAllClose([0.1, 0.2, 0.3, 0.4, 0.5, 0.6], context) + + def assert_resized(self, example): + width = example.features.feature['image/width'].int64_list.value[0] + self.assertAllEqual(width, 2) + height = example.features.feature['image/height'].int64_list.value[0] + self.assertAllEqual(height, 2) + + def assert_size(self, example): + width = example.features.feature['image/width'].int64_list.value[0] + self.assertAllEqual(width, 4) + height = example.features.feature['image/height'].int64_list.value[0] + self.assertAllEqual(height, 4) + + def test_sliding_window(self): + example_list = ['a', 'b', 'c', 'd', 'e', 'f', 'g'] + max_clip_length = 3 + stride_length = 3 + out_list = [list(i) for i in add_context_to_examples.get_sliding_window( + example_list, max_clip_length, stride_length)] + self.assertAllEqual(out_list, [['a', 'b', 'c'], + ['d', 'e', 'f'], + ['g']]) + + def test_rekey_data_fn(self): + sequence_key = 'image/seq_id' + time_horizon = None + reduce_image_size = False + max_dim = None + + rekey_fn = add_context_to_examples.ReKeyDataFn( + sequence_key, time_horizon, + reduce_image_size, max_dim) + output = rekey_fn.process(self._create_first_tf_example()) + + self.assert_expected_key(output[0][0]) + self.assert_size(output[0][1]) + + def test_rekey_data_fn_w_resize(self): + sequence_key = 'image/seq_id' + time_horizon = None + reduce_image_size = True + max_dim = 2 + + rekey_fn = add_context_to_examples.ReKeyDataFn( + sequence_key, time_horizon, + reduce_image_size, max_dim) + output = rekey_fn.process(self._create_first_tf_example()) + + self.assert_expected_key(output[0][0]) + self.assert_resized(output[0][1]) + + def test_sort_fn(self): + sequence_key = 'image/seq_id' + sorted_image_ids = False + max_num_elements_in_context_features = 10 + sort_fn = add_context_to_examples.SortGroupedDataFn( + sequence_key, sorted_image_ids, max_num_elements_in_context_features) + output = sort_fn.process( + ('dummy_key', [tf.train.Example.FromString( + self._create_second_tf_example()), + tf.train.Example.FromString( + self._create_first_tf_example())])) + + self.assert_sorted(output[0][1]) + + def test_add_context_fn(self): + sequence_key = 'image/seq_id' + add_context_features = True + image_ids_to_keep = 'All' + context_fn = add_context_to_examples.GenerateContextFn( + sequence_key, add_context_features, image_ids_to_keep) + output = context_fn.process( + ('dummy_key', 
[tf.train.Example.FromString( + self._create_first_tf_example()), + tf.train.Example.FromString( + self._create_second_tf_example())])) + + self.assertEqual(len(output), 2) + self.assert_context(output) + + def test_add_context_fn_output_sequence_example(self): + sequence_key = 'image/seq_id' + add_context_features = True + image_ids_to_keep = 'All' + context_fn = add_context_to_examples.GenerateContextFn( + sequence_key, add_context_features, image_ids_to_keep, + output_type='tf_sequence_example') + output = context_fn.process( + ('01', + [tf.train.Example.FromString(self._create_first_tf_example()), + tf.train.Example.FromString(self._create_second_tf_example())])) + + self.assertEqual(len(output), 1) + self.assert_expected_sequence_example(output) + + def test_add_context_fn_output_sequence_example_cliplen(self): + sequence_key = 'image/seq_id' + add_context_features = True + image_ids_to_keep = 'All' + context_fn = add_context_to_examples.GenerateContextFn( + sequence_key, add_context_features, image_ids_to_keep, + output_type='tf_sequence_example', max_clip_length=1) + output = context_fn.process( + ('01', + [tf.train.Example.FromString(self._create_first_tf_example()), + tf.train.Example.FromString(self._create_second_tf_example())])) + self.assertEqual(len(output), 2) + + def test_beam_pipeline(self): + with InMemoryTFRecord( + [self._create_first_tf_example(), + self._create_second_tf_example()]) as input_tfrecord: + runner = runners.DirectRunner() + temp_dir = tempfile.mkdtemp(dir=os.environ.get('TEST_TMPDIR')) + output_tfrecord = os.path.join(temp_dir, 'output_tfrecord') + sequence_key = six.ensure_binary('image/seq_id') + max_num_elements = 10 + num_shards = 1 + pipeline = add_context_to_examples.construct_pipeline( + input_tfrecord, + output_tfrecord, + sequence_key, + max_num_elements_in_context_features=max_num_elements, + num_shards=num_shards) + runner.run(pipeline) + filenames = tf.io.gfile.glob(output_tfrecord + '-?????-of-?????') + actual_output = [] + record_iterator = tf.python_io.tf_record_iterator(path=filenames[0]) + for record in record_iterator: + actual_output.append(record) + self.assertEqual(len(actual_output), 2) + self.assert_expected_examples([tf.train.Example.FromString( + tf_example) for tf_example in actual_output]) + + def test_beam_pipeline_sequence_example(self): + with InMemoryTFRecord( + [self._create_first_tf_example(), + self._create_second_tf_example()]) as input_tfrecord: + runner = runners.DirectRunner() + temp_dir = tempfile.mkdtemp(dir=os.environ.get('TEST_TMPDIR')) + output_tfrecord = os.path.join(temp_dir, 'output_tfrecord') + sequence_key = six.ensure_binary('image/seq_id') + max_num_elements = 10 + num_shards = 1 + pipeline = add_context_to_examples.construct_pipeline( + input_tfrecord, + output_tfrecord, + sequence_key, + max_num_elements_in_context_features=max_num_elements, + num_shards=num_shards, + output_type='tf_sequence_example') + runner.run(pipeline) + filenames = tf.io.gfile.glob(output_tfrecord + '-?????-of-?????') + actual_output = [] + record_iterator = tf.python_io.tf_record_iterator( + path=filenames[0]) + for record in record_iterator: + actual_output.append(record) + self.assertEqual(len(actual_output), 1) + self.assert_expected_sequence_example( + [tf.train.SequenceExample.FromString( + tf_example) for tf_example in actual_output]) + +if __name__ == '__main__': + tf.test.main() diff --git a/research/object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_main.py 
b/research/object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_main.py new file mode 100644 index 000000000..106cf5adb --- /dev/null +++ b/research/object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_main.py @@ -0,0 +1,324 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +r"""Beam pipeline to create COCO Camera Traps Object Detection TFRecords. + +Please note that this tool creates sharded output files. + +This tool assumes the input annotations are in the COCO Camera Traps json +format, specified here: +https://github.com/Microsoft/CameraTraps/blob/master/data_management/README.md + +Example usage: + + python create_cococameratraps_tfexample_main.py \ + --alsologtostderr \ + --output_tfrecord_prefix="/path/to/output/tfrecord/location/prefix" \ + --image_directory="/path/to/image/folder/" \ + --input_annotations_file="path/to/annotations.json" + +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import hashlib +import io +import json +import logging +import os +from absl import app +from absl import flags +import apache_beam as beam +import numpy as np +import PIL.Image +import tensorflow.compat.v1 as tf +from apache_beam import runners +from object_detection.utils import dataset_util + +flags.DEFINE_string('image_directory', None, 'Directory where images are ' + 'stored') +flags.DEFINE_string('output_tfrecord_prefix', None, + 'TFRecord containing images in tf.Example format.') +flags.DEFINE_string('input_annotations_file', None, 'Path to Coco-CameraTraps' + 'style annotations file') +flags.DEFINE_integer('num_images_per_shard', + 200, + 'The number of images to be stored in each shard.') + +FLAGS = flags.FLAGS + + +class ParseImage(beam.DoFn): + """A DoFn that parses a COCO-CameraTraps json and emits TFRecords.""" + + def __init__(self, image_directory, images, annotations, categories, + keep_bboxes): + """Initialization function. + + Args: + image_directory: Path to image directory + images: list of COCO Camera Traps style image dictionaries + annotations: list of COCO Camera Traps style annotation dictionaries + categories: list of COCO Camera Traps style category dictionaries + keep_bboxes: Whether to keep any bounding boxes that exist in the + annotations + """ + + self._image_directory = image_directory + self._image_dict = {im['id']: im for im in images} + self._annotation_dict = {im['id']: [] for im in images} + self._category_dict = {int(cat['id']): cat for cat in categories} + for ann in annotations: + self._annotation_dict[ann['image_id']].append(ann) + self._images = images + self._keep_bboxes = keep_bboxes + + self._num_examples_processed = beam.metrics.Metrics.counter( + 'cococameratraps_data_generation', 'num_tf_examples_processed') + + def process(self, image_id): + """Builds a tf.Example given an image id. 
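+
+    Reads the image from disk, checks that it can be decoded, and attaches the
+    COCO Camera Traps metadata (location, sequence information, capture date)
+    plus any ground truth annotations to the resulting tf.Example.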
+ + Args: + image_id: the image id of the associated image + + Returns: + List of tf.Examples. + """ + + image = self._image_dict[image_id] + annotations = self._annotation_dict[image_id] + image_height = image['height'] + image_width = image['width'] + filename = image['file_name'] + image_id = image['id'] + image_location_id = image['location'] + + image_datetime = str(image['date_captured']) + + image_sequence_id = str(image['seq_id']) + image_sequence_num_frames = int(image['seq_num_frames']) + image_sequence_frame_num = int(image['frame_num']) + + full_path = os.path.join(self._image_directory, filename) + + try: + # Ensure the image exists and is not corrupted + with tf.io.gfile.GFile(full_path, 'rb') as fid: + encoded_jpg = fid.read() + encoded_jpg_io = io.BytesIO(encoded_jpg) + image = PIL.Image.open(encoded_jpg_io) + # Ensure the image can be read by tf + with tf.Graph().as_default(): + image = tf.image.decode_jpeg(encoded_jpg, channels=3) + init_op = tf.initialize_all_tables() + with tf.Session() as sess: + sess.run(init_op) + sess.run(image) + except Exception as e: # pylint: disable=broad-except + # The image file is missing or corrupt + tf.logging.error(str(e)) + return [] + + key = hashlib.sha256(encoded_jpg).hexdigest() + feature_dict = { + 'image/height': + dataset_util.int64_feature(image_height), + 'image/width': + dataset_util.int64_feature(image_width), + 'image/filename': + dataset_util.bytes_feature(filename.encode('utf8')), + 'image/source_id': + dataset_util.bytes_feature(str(image_id).encode('utf8')), + 'image/key/sha256': + dataset_util.bytes_feature(key.encode('utf8')), + 'image/encoded': + dataset_util.bytes_feature(encoded_jpg), + 'image/format': + dataset_util.bytes_feature('jpeg'.encode('utf8')), + 'image/location': + dataset_util.bytes_feature(str(image_location_id).encode('utf8')), + 'image/seq_num_frames': + dataset_util.int64_feature(image_sequence_num_frames), + 'image/seq_frame_num': + dataset_util.int64_feature(image_sequence_frame_num), + 'image/seq_id': + dataset_util.bytes_feature(image_sequence_id.encode('utf8')), + 'image/date_captured': + dataset_util.bytes_feature(image_datetime.encode('utf8')) + } + + num_annotations_skipped = 0 + if annotations: + xmin = [] + xmax = [] + ymin = [] + ymax = [] + category_names = [] + category_ids = [] + area = [] + + for object_annotations in annotations: + if 'bbox' in object_annotations and self._keep_bboxes: + (x, y, width, height) = tuple(object_annotations['bbox']) + if width <= 0 or height <= 0: + num_annotations_skipped += 1 + continue + if x + width > image_width or y + height > image_height: + num_annotations_skipped += 1 + continue + xmin.append(float(x) / image_width) + xmax.append(float(x + width) / image_width) + ymin.append(float(y) / image_height) + ymax.append(float(y + height) / image_height) + if 'area' in object_annotations: + area.append(object_annotations['area']) + else: + # approximate area using l*w/2 + area.append(width*height/2.0) + + category_id = int(object_annotations['category_id']) + category_ids.append(category_id) + category_names.append( + self._category_dict[category_id]['name'].encode('utf8')) + + feature_dict.update({ + 'image/object/bbox/xmin': + dataset_util.float_list_feature(xmin), + 'image/object/bbox/xmax': + dataset_util.float_list_feature(xmax), + 'image/object/bbox/ymin': + dataset_util.float_list_feature(ymin), + 'image/object/bbox/ymax': + dataset_util.float_list_feature(ymax), + 'image/object/class/text': + dataset_util.bytes_list_feature(category_names), + 
'image/object/class/label': + dataset_util.int64_list_feature(category_ids), + 'image/object/area': + dataset_util.float_list_feature(area), + }) + + # For classification, add the first category to image/class/label and + # image/class/text + if not category_ids: + feature_dict.update({ + 'image/class/label': + dataset_util.int64_list_feature([0]), + 'image/class/text': + dataset_util.bytes_list_feature(['empty'.encode('utf8')]), + }) + else: + feature_dict.update({ + 'image/class/label': + dataset_util.int64_list_feature([category_ids[0]]), + 'image/class/text': + dataset_util.bytes_list_feature([category_names[0]]), + }) + + else: + # Add empty class if there are no annotations + feature_dict.update({ + 'image/class/label': + dataset_util.int64_list_feature([0]), + 'image/class/text': + dataset_util.bytes_list_feature(['empty'.encode('utf8')]), + }) + + example = tf.train.Example(features=tf.train.Features(feature=feature_dict)) + self._num_examples_processed.inc(1) + + return [(example)] + + +def _load_json_data(data_file): + with tf.io.gfile.GFile(data_file, 'r') as fid: + data_dict = json.load(fid) + return data_dict + + +def create_pipeline(image_directory, + input_annotations_file, + output_tfrecord_prefix=None, + num_images_per_shard=200, + keep_bboxes=True): + """Creates a beam pipeline for producing a COCO-CameraTraps Image dataset. + + Args: + image_directory: Path to image directory + input_annotations_file: Path to a coco-cameratraps annotation file + output_tfrecord_prefix: Absolute path for tfrecord outputs. Final files will + be named {output_tfrecord_prefix}@N. + num_images_per_shard: The number of images to store in each shard + keep_bboxes: Whether to keep any bounding boxes that exist in the json file + + Returns: + A Beam pipeline. + """ + + logging.info('Reading data from COCO-CameraTraps Dataset.') + + data = _load_json_data(input_annotations_file) + + num_shards = int(np.ceil(float(len(data['images']))/num_images_per_shard)) + + def pipeline(root): + """Builds beam pipeline.""" + + image_examples = ( + root + | ('CreateCollections') >> beam.Create( + [im['id'] for im in data['images']]) + | ('ParseImage') >> beam.ParDo(ParseImage( + image_directory, data['images'], data['annotations'], + data['categories'], keep_bboxes=keep_bboxes))) + _ = (image_examples + | ('Reshuffle') >> beam.Reshuffle() + | ('WriteTfImageExample') >> beam.io.tfrecordio.WriteToTFRecord( + output_tfrecord_prefix, + num_shards=num_shards, + coder=beam.coders.ProtoCoder(tf.train.Example))) + + return pipeline + + +def main(_): + """Runs the Beam pipeline that performs inference. 
+ + Args: + _: unused + """ + + # must create before flags are used + runner = runners.DirectRunner() + + dirname = os.path.dirname(FLAGS.output_tfrecord_prefix) + tf.io.gfile.makedirs(dirname) + + runner.run( + create_pipeline( + image_directory=FLAGS.image_directory, + input_annotations_file=FLAGS.input_annotations_file, + output_tfrecord_prefix=FLAGS.output_tfrecord_prefix, + num_images_per_shard=FLAGS.num_images_per_shard)) + + +if __name__ == '__main__': + flags.mark_flags_as_required([ + 'image_directory', + 'input_annotations_file', + 'output_tfrecord_prefix' + ]) + app.run(main) diff --git a/research/object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_tf1_test.py b/research/object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_tf1_test.py new file mode 100644 index 000000000..be6dc0dc4 --- /dev/null +++ b/research/object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_tf1_test.py @@ -0,0 +1,201 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Tests for create_cococameratraps_tfexample_main.""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +import datetime +import json +import os +import tempfile +import unittest +import numpy as np + +from PIL import Image +import tensorflow.compat.v1 as tf +from object_detection.dataset_tools.context_rcnn import create_cococameratraps_tfexample_main +from object_detection.utils import tf_version +from apache_beam import runners + + +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') +class CreateCOCOCameraTrapsTfexampleTest(tf.test.TestCase): + + IMAGE_HEIGHT = 360 + IMAGE_WIDTH = 480 + + def _write_random_images_to_directory(self, directory, num_frames): + for frame_num in range(num_frames): + img = np.random.randint(0, high=256, + size=(self.IMAGE_HEIGHT, self.IMAGE_WIDTH, 3), + dtype=np.uint8) + pil_image = Image.fromarray(img) + fname = 'im_' + str(frame_num) + '.jpg' + pil_image.save(os.path.join(directory, fname), 'JPEG') + + def _create_json_file(self, directory, num_frames, keep_bboxes=False): + json_dict = {'images': [], 'annotations': []} + json_dict['categories'] = [{'id': 0, 'name': 'empty'}, + {'id': 1, 'name': 'animal'}] + for idx in range(num_frames): + im = {'id': 'im_' + str(idx), + 'file_name': 'im_' + str(idx) + '.jpg', + 'height': self.IMAGE_HEIGHT, + 'width': self.IMAGE_WIDTH, + 'seq_id': 'seq_1', + 'seq_num_frames': num_frames, + 'frame_num': idx, + 'location': 'loc_' + str(idx), + 'date_captured': str(datetime.datetime.now()) + } + json_dict['images'].append(im) + ann = {'id': 'ann' + str(idx), + 'image_id': 'im_' + str(idx), + 'category_id': 1, + } + if keep_bboxes: + ann['bbox'] = [0.0 * self.IMAGE_WIDTH, + 0.1 * self.IMAGE_HEIGHT, + 0.5 * self.IMAGE_WIDTH, + 0.5 * self.IMAGE_HEIGHT] + json_dict['annotations'].append(ann) + + 
json_path = os.path.join(directory, 'test_file.json') + with tf.io.gfile.GFile(json_path, 'w') as f: + json.dump(json_dict, f) + return json_path + + def assert_expected_example_bbox(self, example): + self.assertAllClose( + example.features.feature['image/object/bbox/ymin'].float_list.value, + [0.1]) + self.assertAllClose( + example.features.feature['image/object/bbox/xmin'].float_list.value, + [0.0]) + self.assertAllClose( + example.features.feature['image/object/bbox/ymax'].float_list.value, + [0.6]) + self.assertAllClose( + example.features.feature['image/object/bbox/xmax'].float_list.value, + [0.5]) + self.assertAllClose( + example.features.feature['image/object/class/label'] + .int64_list.value, [1]) + self.assertAllEqual( + example.features.feature['image/object/class/text'] + .bytes_list.value, ['animal']) + self.assertAllClose( + example.features.feature['image/class/label'] + .int64_list.value, [1]) + self.assertAllEqual( + example.features.feature['image/class/text'] + .bytes_list.value, ['animal']) + + # Check other essential attributes. + self.assertAllEqual( + example.features.feature['image/height'].int64_list.value, + [self.IMAGE_HEIGHT]) + self.assertAllEqual( + example.features.feature['image/width'].int64_list.value, + [self.IMAGE_WIDTH]) + self.assertAllEqual( + example.features.feature['image/source_id'].bytes_list.value, + ['im_0']) + self.assertTrue( + example.features.feature['image/encoded'].bytes_list.value) + + def assert_expected_example(self, example): + self.assertAllClose( + example.features.feature['image/object/bbox/ymin'].float_list.value, + []) + self.assertAllClose( + example.features.feature['image/object/bbox/xmin'].float_list.value, + []) + self.assertAllClose( + example.features.feature['image/object/bbox/ymax'].float_list.value, + []) + self.assertAllClose( + example.features.feature['image/object/bbox/xmax'].float_list.value, + []) + self.assertAllClose( + example.features.feature['image/object/class/label'] + .int64_list.value, [1]) + self.assertAllEqual( + example.features.feature['image/object/class/text'] + .bytes_list.value, ['animal']) + self.assertAllClose( + example.features.feature['image/class/label'] + .int64_list.value, [1]) + self.assertAllEqual( + example.features.feature['image/class/text'] + .bytes_list.value, ['animal']) + + # Check other essential attributes. 
+ self.assertAllEqual( + example.features.feature['image/height'].int64_list.value, + [self.IMAGE_HEIGHT]) + self.assertAllEqual( + example.features.feature['image/width'].int64_list.value, + [self.IMAGE_WIDTH]) + self.assertAllEqual( + example.features.feature['image/source_id'].bytes_list.value, + ['im_0']) + self.assertTrue( + example.features.feature['image/encoded'].bytes_list.value) + + def test_beam_pipeline(self): + runner = runners.DirectRunner() + num_frames = 1 + temp_dir = tempfile.mkdtemp(dir=os.environ.get('TEST_TMPDIR')) + json_path = self._create_json_file(temp_dir, num_frames) + output_tfrecord = temp_dir+'/output' + self._write_random_images_to_directory(temp_dir, num_frames) + pipeline = create_cococameratraps_tfexample_main.create_pipeline( + temp_dir, json_path, + output_tfrecord_prefix=output_tfrecord) + runner.run(pipeline) + filenames = tf.io.gfile.glob(output_tfrecord + '-?????-of-?????') + actual_output = [] + record_iterator = tf.python_io.tf_record_iterator(path=filenames[0]) + for record in record_iterator: + actual_output.append(record) + self.assertEqual(len(actual_output), num_frames) + self.assert_expected_example(tf.train.Example.FromString( + actual_output[0])) + + def test_beam_pipeline_bbox(self): + runner = runners.DirectRunner() + num_frames = 1 + temp_dir = tempfile.mkdtemp(dir=os.environ.get('TEST_TMPDIR')) + json_path = self._create_json_file(temp_dir, num_frames, keep_bboxes=True) + output_tfrecord = temp_dir+'/output' + self._write_random_images_to_directory(temp_dir, num_frames) + pipeline = create_cococameratraps_tfexample_main.create_pipeline( + temp_dir, json_path, + output_tfrecord_prefix=output_tfrecord, + keep_bboxes=True) + runner.run(pipeline) + filenames = tf.io.gfile.glob(output_tfrecord+'-?????-of-?????') + actual_output = [] + record_iterator = tf.python_io.tf_record_iterator(path=filenames[0]) + for record in record_iterator: + actual_output.append(record) + self.assertEqual(len(actual_output), num_frames) + self.assert_expected_example_bbox(tf.train.Example.FromString( + actual_output[0])) + + +if __name__ == '__main__': + tf.test.main() diff --git a/research/object_detection/dataset_tools/context_rcnn/generate_detection_data.py b/research/object_detection/dataset_tools/context_rcnn/generate_detection_data.py new file mode 100644 index 000000000..95c16c135 --- /dev/null +++ b/research/object_detection/dataset_tools/context_rcnn/generate_detection_data.py @@ -0,0 +1,262 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +r"""A Beam job to generate detection data for camera trap images. + +This tools allows to run inference with an exported Object Detection model in +`saved_model` format and produce raw detection boxes on images in tf.Examples, +with the assumption that the bounding box class label will match the image-level +class label in the tf.Example. 
+
+Steps to generate a detection dataset:
+1. Use object_detection/export_inference_graph.py to get a `saved_model` for
+   inference. The input node must accept a tf.Example proto.
+2. Run this tool with `saved_model` from step 1 and a TFRecord of tf.Example
+   protos containing images for inference.
+
+Example Usage:
+--------------
+python tensorflow_models/object_detection/export_inference_graph.py \
+    --alsologtostderr \
+    --input_type tf_example \
+    --pipeline_config_path path/to/detection_model.config \
+    --trained_checkpoint_prefix path/to/model.ckpt \
+    --output_directory path/to/exported_model_directory
+
+python generate_detection_data.py \
+    --alsologtostderr \
+    --detection_input_tfrecord path/to/input_tfrecord@X \
+    --detection_output_tfrecord path/to/output_tfrecord@X \
+    --detection_model_dir path/to/exported_model_directory/saved_model
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import os
+import threading
+from absl import app
+from absl import flags
+import apache_beam as beam
+import tensorflow.compat.v1 as tf
+from apache_beam import runners
+
+
+flags.DEFINE_string('detection_input_tfrecord', None, 'TFRecord containing '
+                    'images in tf.Example format for object detection.')
+flags.DEFINE_string('detection_output_tfrecord', None,
+                    'TFRecord containing detections in tf.Example format.')
+flags.DEFINE_string('detection_model_dir', None,
+                    'Path to directory containing an object detection '
+                    'SavedModel.')
+flags.DEFINE_float('confidence_threshold', 0.9,
+                   'Min confidence to keep bounding boxes.')
+flags.DEFINE_integer('num_shards', 0, 'Number of output shards.')
+
+FLAGS = flags.FLAGS
+
+
+class GenerateDetectionDataFn(beam.DoFn):
+  """Generates detection data for camera trap images.
+
+  This Beam DoFn performs inference with an object detection `saved_model` and
+  produces detection boxes for camera trap data, matched to the image-level
+  object class.
+  """
+  session_lock = threading.Lock()
+
+  def __init__(self, model_dir, confidence_threshold):
+    """Initialization function.
+
+    Args:
+      model_dir: A directory containing the saved model.
+      confidence_threshold: The confidence threshold for boxes to keep.
+    """
+    self._model_dir = model_dir
+    self._confidence_threshold = confidence_threshold
+    self._session = None
+    self._num_examples_processed = beam.metrics.Metrics.counter(
+        'detection_data_generation', 'num_tf_examples_processed')
+
+  def start_bundle(self):
+    self._load_inference_model()
+
+  def _load_inference_model(self):
+    # Because initialization of the tf.Session is expensive we share
+    # one instance across all threads in the worker. This is possible since
+    # tf.Session.run() is thread safe.
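+    # Note that session_lock is a class attribute, so concurrent
+    # GenerateDetectionDataFn instances in this worker serialize on the same
+    # lock, and the `self._session is None` check below keeps the SavedModel
+    # from being reloaded on every new bundle.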
+ with self.session_lock: + if self._session is None: + graph = tf.Graph() + self._session = tf.Session(graph=graph) + with graph.as_default(): + meta_graph = tf.saved_model.loader.load( + self._session, [tf.saved_model.tag_constants.SERVING], + self._model_dir) + signature = meta_graph.signature_def['serving_default'] + input_tensor_name = signature.inputs['inputs'].name + self._input = graph.get_tensor_by_name(input_tensor_name) + self._boxes_node = graph.get_tensor_by_name( + signature.outputs['detection_boxes'].name) + self._scores_node = graph.get_tensor_by_name( + signature.outputs['detection_scores'].name) + self._num_detections_node = graph.get_tensor_by_name( + signature.outputs['num_detections'].name) + + def process(self, tfrecord_entry): + return self._run_inference_and_generate_detections(tfrecord_entry) + + def _run_inference_and_generate_detections(self, tfrecord_entry): + input_example = tf.train.Example.FromString(tfrecord_entry) + if input_example.features.feature[ + 'image/object/bbox/ymin'].float_list.value: + # There are already ground truth boxes for this image, just keep them. + return [input_example] + + detection_boxes, detection_scores, num_detections = self._session.run( + [self._boxes_node, self._scores_node, self._num_detections_node], + feed_dict={self._input: [tfrecord_entry]}) + + example = tf.train.Example() + + num_detections = int(num_detections[0]) + + image_class_labels = input_example.features.feature[ + 'image/object/class/label'].int64_list.value + + image_class_texts = input_example.features.feature[ + 'image/object/class/text'].bytes_list.value + + # Ignore any images with multiple classes, + # we can't match the class to the box. + if len(image_class_labels) > 1: + return [] + + # Don't add boxes for images already labeled empty (for now) + if len(image_class_labels) == 1: + # Add boxes over confidence threshold. + for idx, score in enumerate(detection_scores[0]): + if score >= self._confidence_threshold and idx < num_detections: + example.features.feature[ + 'image/object/bbox/ymin'].float_list.value.extend([ + detection_boxes[0, idx, 0]]) + example.features.feature[ + 'image/object/bbox/xmin'].float_list.value.extend([ + detection_boxes[0, idx, 1]]) + example.features.feature[ + 'image/object/bbox/ymax'].float_list.value.extend([ + detection_boxes[0, idx, 2]]) + example.features.feature[ + 'image/object/bbox/xmax'].float_list.value.extend([ + detection_boxes[0, idx, 3]]) + + # Add box scores and class texts and labels. 
+ example.features.feature[ + 'image/object/class/score'].float_list.value.extend( + [score]) + + example.features.feature[ + 'image/object/class/label'].int64_list.value.extend( + [image_class_labels[0]]) + + example.features.feature[ + 'image/object/class/text'].bytes_list.value.extend( + [image_class_texts[0]]) + + # Add other essential example attributes + example.features.feature['image/encoded'].bytes_list.value.extend( + input_example.features.feature['image/encoded'].bytes_list.value) + example.features.feature['image/height'].int64_list.value.extend( + input_example.features.feature['image/height'].int64_list.value) + example.features.feature['image/width'].int64_list.value.extend( + input_example.features.feature['image/width'].int64_list.value) + example.features.feature['image/source_id'].bytes_list.value.extend( + input_example.features.feature['image/source_id'].bytes_list.value) + example.features.feature['image/location'].bytes_list.value.extend( + input_example.features.feature['image/location'].bytes_list.value) + + example.features.feature['image/date_captured'].bytes_list.value.extend( + input_example.features.feature['image/date_captured'].bytes_list.value) + + example.features.feature['image/class/text'].bytes_list.value.extend( + input_example.features.feature['image/class/text'].bytes_list.value) + example.features.feature['image/class/label'].int64_list.value.extend( + input_example.features.feature['image/class/label'].int64_list.value) + + example.features.feature['image/seq_id'].bytes_list.value.extend( + input_example.features.feature['image/seq_id'].bytes_list.value) + example.features.feature['image/seq_num_frames'].int64_list.value.extend( + input_example.features.feature['image/seq_num_frames'].int64_list.value) + example.features.feature['image/seq_frame_num'].int64_list.value.extend( + input_example.features.feature['image/seq_frame_num'].int64_list.value) + + self._num_examples_processed.inc(1) + return [example] + + +def construct_pipeline(input_tfrecord, output_tfrecord, model_dir, + confidence_threshold, num_shards): + """Returns a Beam pipeline to run object detection inference. + + Args: + input_tfrecord: A TFRecord of tf.train.Example protos containing images. + output_tfrecord: A TFRecord of tf.train.Example protos that contain images + in the input TFRecord and the detections from the model. + model_dir: Path to `saved_model` to use for inference. + confidence_threshold: Threshold to use when keeping detection results. + num_shards: The number of output shards. + Returns: + pipeline: A Beam pipeline. + """ + def pipeline(root): + input_collection = ( + root | 'ReadInputTFRecord' >> beam.io.tfrecordio.ReadFromTFRecord( + input_tfrecord, + coder=beam.coders.BytesCoder())) + output_collection = input_collection | 'RunInference' >> beam.ParDo( + GenerateDetectionDataFn(model_dir, confidence_threshold)) + output_collection = output_collection | 'Reshuffle' >> beam.Reshuffle() + _ = output_collection | 'WritetoDisk' >> beam.io.tfrecordio.WriteToTFRecord( + output_tfrecord, + num_shards=num_shards, + coder=beam.coders.ProtoCoder(tf.train.Example)) + return pipeline + + +def main(_): + """Runs the Beam pipeline that performs inference. 
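+
+  Reads the detection flags, creates the output directory if needed, and runs
+  the detection pipeline on a Beam DirectRunner.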
+ + Args: + _: unused + """ + # must create before flags are used + runner = runners.DirectRunner() + + dirname = os.path.dirname(FLAGS.detection_output_tfrecord) + tf.io.gfile.makedirs(dirname) + runner.run( + construct_pipeline(FLAGS.detection_input_tfrecord, + FLAGS.detection_output_tfrecord, + FLAGS.detection_model_dir, + FLAGS.confidence_threshold, + FLAGS.num_shards)) + + +if __name__ == '__main__': + flags.mark_flags_as_required([ + 'detection_input_tfrecord', + 'detection_output_tfrecord', + 'detection_model_dir' + ]) + app.run(main) diff --git a/research/object_detection/dataset_tools/context_rcnn/generate_detection_data_tf1_test.py b/research/object_detection/dataset_tools/context_rcnn/generate_detection_data_tf1_test.py new file mode 100644 index 000000000..9002e750f --- /dev/null +++ b/research/object_detection/dataset_tools/context_rcnn/generate_detection_data_tf1_test.py @@ -0,0 +1,267 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Tests for generate_detection_data.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import contextlib +import os +import tempfile +import unittest +import numpy as np +import six +import tensorflow.compat.v1 as tf + +from object_detection import exporter +from object_detection.builders import model_builder +from object_detection.core import model +from object_detection.dataset_tools.context_rcnn import generate_detection_data +from object_detection.protos import pipeline_pb2 +from object_detection.utils import tf_version +from apache_beam import runners + +if six.PY2: + import mock # pylint: disable=g-import-not-at-top +else: + mock = unittest.mock + + +class FakeModel(model.DetectionModel): + """A Fake Detection model with expected output nodes from post-processing.""" + + def preprocess(self, inputs): + true_image_shapes = [] # Doesn't matter for the fake model. 
+ return tf.identity(inputs), true_image_shapes + + def predict(self, preprocessed_inputs, true_image_shapes): + return {'image': tf.layers.conv2d(preprocessed_inputs, 3, 1)} + + def postprocess(self, prediction_dict, true_image_shapes): + with tf.control_dependencies(prediction_dict.values()): + postprocessed_tensors = { + 'detection_boxes': tf.constant([[[0.0, 0.1, 0.5, 0.6], + [0.5, 0.5, 0.8, 0.8]]], tf.float32), + 'detection_scores': tf.constant([[0.95, 0.6]], tf.float32), + 'detection_multiclass_scores': tf.constant([[[0.1, 0.7, 0.2], + [0.3, 0.1, 0.6]]], + tf.float32), + 'detection_classes': tf.constant([[0, 1]], tf.float32), + 'num_detections': tf.constant([2], tf.float32) + } + return postprocessed_tensors + + def restore_map(self, checkpoint_path, fine_tune_checkpoint_type): + pass + + def loss(self, prediction_dict, true_image_shapes): + pass + + def regularization_losses(self): + pass + + def updates(self): + pass + + +@contextlib.contextmanager +def InMemoryTFRecord(entries): + temp = tempfile.NamedTemporaryFile(delete=False) + filename = temp.name + try: + with tf.python_io.TFRecordWriter(filename) as writer: + for value in entries: + writer.write(value) + yield filename + finally: + os.unlink(filename) + + +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') +class GenerateDetectionDataTest(tf.test.TestCase): + + def _save_checkpoint_from_mock_model(self, checkpoint_path): + """A function to save checkpoint from a fake Detection Model. + + Args: + checkpoint_path: Path to save checkpoint from Fake model. + """ + g = tf.Graph() + with g.as_default(): + mock_model = FakeModel(num_classes=5) + preprocessed_inputs, true_image_shapes = mock_model.preprocess( + tf.placeholder(tf.float32, shape=[None, None, None, 3])) + predictions = mock_model.predict(preprocessed_inputs, true_image_shapes) + mock_model.postprocess(predictions, true_image_shapes) + tf.train.get_or_create_global_step() + saver = tf.train.Saver() + init = tf.global_variables_initializer() + with self.test_session(graph=g) as sess: + sess.run(init) + saver.save(sess, checkpoint_path) + + def _export_saved_model(self): + tmp_dir = self.get_temp_dir() + checkpoint_path = os.path.join(tmp_dir, 'model.ckpt') + self._save_checkpoint_from_mock_model(checkpoint_path) + output_directory = os.path.join(tmp_dir, 'output') + saved_model_path = os.path.join(output_directory, 'saved_model') + tf.io.gfile.makedirs(output_directory) + with mock.patch.object( + model_builder, 'build', autospec=True) as mock_builder: + mock_builder.return_value = FakeModel(num_classes=5) + pipeline_config = pipeline_pb2.TrainEvalPipelineConfig() + pipeline_config.eval_config.use_moving_averages = False + detection_model = model_builder.build(pipeline_config.model, + is_training=False) + outputs, placeholder_tensor = exporter.build_detection_graph( + input_type='tf_example', + detection_model=detection_model, + input_shape=None, + output_collection_name='inference_op', + graph_hook_fn=None) + output_node_names = ','.join(outputs.keys()) + saver = tf.train.Saver() + input_saver_def = saver.as_saver_def() + frozen_graph_def = exporter.freeze_graph_with_def_protos( + input_graph_def=tf.get_default_graph().as_graph_def(), + input_saver_def=input_saver_def, + input_checkpoint=checkpoint_path, + output_node_names=output_node_names, + restore_op_name='save/restore_all', + filename_tensor_name='save/Const:0', + output_graph='', + clear_devices=True, + initializer_nodes='') + exporter.write_saved_model( + saved_model_path=saved_model_path, + 
frozen_graph_def=frozen_graph_def, + inputs=placeholder_tensor, + outputs=outputs) + return saved_model_path + + def _create_tf_example(self): + with self.test_session(): + encoded_image = tf.image.encode_jpeg( + tf.constant(np.ones((4, 6, 3)).astype(np.uint8))).eval() + + def BytesFeature(value): + return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value])) + + def Int64Feature(value): + return tf.train.Feature(int64_list=tf.train.Int64List(value=[value])) + + example = tf.train.Example(features=tf.train.Features(feature={ + 'image/encoded': BytesFeature(encoded_image), + 'image/source_id': BytesFeature(b'image_id'), + 'image/height': Int64Feature(4), + 'image/width': Int64Feature(6), + 'image/object/class/label': Int64Feature(5), + 'image/object/class/text': BytesFeature(b'hyena'), + 'image/class/label': Int64Feature(5), + 'image/class/text': BytesFeature(b'hyena'), + })) + + return example.SerializeToString() + + def assert_expected_example(self, example): + self.assertAllClose( + example.features.feature['image/object/bbox/ymin'].float_list.value, + [0.0]) + self.assertAllClose( + example.features.feature['image/object/bbox/xmin'].float_list.value, + [0.1]) + self.assertAllClose( + example.features.feature['image/object/bbox/ymax'].float_list.value, + [0.5]) + self.assertAllClose( + example.features.feature['image/object/bbox/xmax'].float_list.value, + [0.6]) + self.assertAllClose( + example.features.feature['image/object/class/score'] + .float_list.value, [0.95]) + self.assertAllClose( + example.features.feature['image/object/class/label'] + .int64_list.value, [5]) + self.assertAllEqual( + example.features.feature['image/object/class/text'] + .bytes_list.value, [b'hyena']) + self.assertAllClose( + example.features.feature['image/class/label'] + .int64_list.value, [5]) + self.assertAllEqual( + example.features.feature['image/class/text'] + .bytes_list.value, [b'hyena']) + + # Check other essential attributes. 
+ self.assertAllEqual( + example.features.feature['image/height'].int64_list.value, [4]) + self.assertAllEqual( + example.features.feature['image/width'].int64_list.value, [6]) + self.assertAllEqual( + example.features.feature['image/source_id'].bytes_list.value, + [b'image_id']) + self.assertTrue( + example.features.feature['image/encoded'].bytes_list.value) + + def test_generate_detection_data_fn(self): + saved_model_path = self._export_saved_model() + confidence_threshold = 0.8 + inference_fn = generate_detection_data.GenerateDetectionDataFn( + saved_model_path, confidence_threshold) + inference_fn.start_bundle() + generated_example = self._create_tf_example() + self.assertAllEqual(tf.train.Example.FromString( + generated_example).features.feature['image/object/class/label'] + .int64_list.value, [5]) + self.assertAllEqual(tf.train.Example.FromString( + generated_example).features.feature['image/object/class/text'] + .bytes_list.value, [b'hyena']) + output = inference_fn.process(generated_example) + output_example = output[0] + + self.assertAllEqual( + output_example.features.feature['image/object/class/label'] + .int64_list.value, [5]) + self.assertAllEqual(output_example.features.feature['image/width'] + .int64_list.value, [6]) + + self.assert_expected_example(output_example) + + def test_beam_pipeline(self): + with InMemoryTFRecord([self._create_tf_example()]) as input_tfrecord: + runner = runners.DirectRunner() + temp_dir = tempfile.mkdtemp(dir=os.environ.get('TEST_TMPDIR')) + output_tfrecord = os.path.join(temp_dir, 'output_tfrecord') + saved_model_path = self._export_saved_model() + confidence_threshold = 0.8 + num_shards = 1 + pipeline = generate_detection_data.construct_pipeline( + input_tfrecord, output_tfrecord, saved_model_path, + confidence_threshold, num_shards) + runner.run(pipeline) + filenames = tf.io.gfile.glob(output_tfrecord + '-?????-of-?????') + actual_output = [] + record_iterator = tf.python_io.tf_record_iterator(path=filenames[0]) + for record in record_iterator: + actual_output.append(record) + self.assertEqual(len(actual_output), 1) + self.assert_expected_example(tf.train.Example.FromString( + actual_output[0])) + + +if __name__ == '__main__': + tf.test.main() diff --git a/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data.py b/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data.py new file mode 100644 index 000000000..a147c4e88 --- /dev/null +++ b/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data.py @@ -0,0 +1,378 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +r"""A Beam job to generate embedding data for camera trap images. + +This tool runs inference with an exported Object Detection model in +`saved_model` format and produce raw embeddings for camera trap data. 
These
+embeddings contain an object-centric feature embedding from Faster R-CNN, the
+datetime that the image was taken (each component normalized to approximately
+[0, 1]), and the position of the object of interest. By default, only the
+highest-scoring object embedding is included.
+
+Steps to generate an embedding dataset:
+1. Use object_detection/export_inference_graph.py to get a Faster R-CNN
+   `saved_model` for inference. The input node must accept a tf.Example proto.
+2. Run this tool with `saved_model` from step 1 and a TFRecord of tf.Example
+   protos containing images for inference.
+
+Example Usage:
+--------------
+python tensorflow_models/object_detection/export_inference_graph.py \
+    --alsologtostderr \
+    --input_type tf_example \
+    --pipeline_config_path path/to/faster_rcnn_model.config \
+    --trained_checkpoint_prefix path/to/model.ckpt \
+    --output_directory path/to/exported_model_directory
+
+python generate_embedding_data.py \
+    --alsologtostderr \
+    --embedding_input_tfrecord path/to/input_tfrecords* \
+    --embedding_output_tfrecord path/to/output_tfrecords \
+    --embedding_model_dir path/to/exported_model_directory/saved_model
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import datetime
+import os
+import threading
+from absl import app
+from absl import flags
+import apache_beam as beam
+import numpy as np
+import six
+import tensorflow.compat.v1 as tf
+from apache_beam import runners
+
+flags.DEFINE_string('embedding_input_tfrecord', None,
+                    'TFRecord containing images in tf.Example format for '
+                    'object detection.')
+flags.DEFINE_string('embedding_output_tfrecord', None,
+                    'TFRecord containing embeddings in tf.Example format.')
+flags.DEFINE_string('embedding_model_dir', None,
+                    'Path to directory containing an object detection '
+                    'SavedModel with detection_box_classifier_features in '
+                    'the output.')
+flags.DEFINE_integer('top_k_embedding_count', 1,
+                     'The number of top k embeddings to add to the memory '
+                     'bank.')
+flags.DEFINE_integer('bottom_k_embedding_count', 0,
+                     'The number of bottom k embeddings to add to the memory '
+                     'bank.')
+flags.DEFINE_integer('num_shards', 0, 'Number of output shards.')
+
+
+FLAGS = flags.FLAGS
+
+
+class GenerateEmbeddingDataFn(beam.DoFn):
+  """Generates embedding data for camera trap images.
+
+  This Beam DoFn performs inference with an object detection `saved_model` and
+  produces contextual embedding vectors.
+  """
+  session_lock = threading.Lock()
+
+  def __init__(self, model_dir, top_k_embedding_count,
+               bottom_k_embedding_count):
+    """Initialization function.
+
+    Args:
+      model_dir: A directory containing the saved model.
+      top_k_embedding_count: The number of high-confidence embeddings to store.
+      bottom_k_embedding_count: The number of low-confidence embeddings to
+        store.
+    """
+    self._model_dir = model_dir
+    self._session = None
+    self._num_examples_processed = beam.metrics.Metrics.counter(
+        'embedding_data_generation', 'num_tf_examples_processed')
+    self._top_k_embedding_count = top_k_embedding_count
+    self._bottom_k_embedding_count = bottom_k_embedding_count
+
+  def start_bundle(self):
+    self._load_inference_model()
+
+  def _load_inference_model(self):
+    # Because initialization of the tf.Session is expensive we share
+    # one instance across all threads in the worker. This is possible since
+    # tf.Session.run() is thread safe.
+ with self.session_lock: + if self._session is None: + graph = tf.Graph() + self._session = tf.Session(graph=graph) + with graph.as_default(): + meta_graph = tf.saved_model.loader.load( + self._session, [tf.saved_model.tag_constants.SERVING], + self._model_dir) + signature = meta_graph.signature_def['serving_default'] + input_tensor_name = signature.inputs['inputs'].name + detection_features_name = signature.outputs['detection_features'].name + detection_boxes_name = signature.outputs['detection_boxes'].name + num_detections_name = signature.outputs['num_detections'].name + self._input = graph.get_tensor_by_name(input_tensor_name) + self._embedding_node = graph.get_tensor_by_name(detection_features_name) + self._box_node = graph.get_tensor_by_name(detection_boxes_name) + self._scores_node = graph.get_tensor_by_name( + signature.outputs['detection_scores'].name) + self._num_detections = graph.get_tensor_by_name(num_detections_name) + tf.logging.info(signature.outputs['detection_features'].name) + tf.logging.info(signature.outputs['detection_boxes'].name) + tf.logging.info(signature.outputs['num_detections'].name) + + def process(self, tfrecord_entry): + return self._run_inference_and_generate_embedding(tfrecord_entry) + + def _run_inference_and_generate_embedding(self, tfrecord_entry): + input_example = tf.train.Example.FromString(tfrecord_entry) + # Convert date_captured datetime string to unix time integer and store + + def get_date_captured(example): + date_captured = datetime.datetime.strptime( + six.ensure_str( + example.features.feature[ + 'image/date_captured'].bytes_list.value[0]), + '%Y-%m-%d %H:%M:%S') + return date_captured + + try: + date_captured = get_date_captured(input_example) + except Exception: # pylint: disable=broad-except + # we require date_captured to be available for all images + return [] + + def embed_date_captured(date_captured): + """Encodes the datetime of the image.""" + embedded_date_captured = [] + month_max = 12.0 + day_max = 31.0 + hour_max = 24.0 + minute_max = 60.0 + min_year = 1990.0 + max_year = 2030.0 + + year = (date_captured.year-min_year)/float(max_year-min_year) + embedded_date_captured.append(year) + + month = (date_captured.month-1)/month_max + embedded_date_captured.append(month) + + day = (date_captured.day-1)/day_max + embedded_date_captured.append(day) + + hour = date_captured.hour/hour_max + embedded_date_captured.append(hour) + + minute = date_captured.minute/minute_max + embedded_date_captured.append(minute) + + return np.asarray(embedded_date_captured) + + def embed_position_and_size(box): + """Encodes the bounding box of the object of interest.""" + ymin = box[0] + xmin = box[1] + ymax = box[2] + xmax = box[3] + w = xmax - xmin + h = ymax - ymin + x = xmin + w / 2.0 + y = ymin + h / 2.0 + return np.asarray([x, y, w, h]) + + unix_time = ( + (date_captured - datetime.datetime.fromtimestamp(0)).total_seconds()) + + example = tf.train.Example() + example.features.feature['image/unix_time'].float_list.value.extend( + [unix_time]) + + (detection_features, detection_boxes, num_detections, + detection_scores) = self._session.run( + [ + self._embedding_node, self._box_node, self._num_detections[0], + self._scores_node + ], + feed_dict={self._input: [tfrecord_entry]}) + + num_detections = int(num_detections) + embed_all = [] + score_all = [] + + detection_features = np.asarray(detection_features) + + def get_bb_embedding(detection_features, detection_boxes, detection_scores, + index): + embedding = detection_features[0][index] + pooled_embedding 
= np.mean(np.mean(embedding, axis=1), axis=0) + + box = detection_boxes[0][index] + position_embedding = embed_position_and_size(box) + + score = detection_scores[0][index] + return np.concatenate((pooled_embedding, position_embedding)), score + + temporal_embedding = embed_date_captured(date_captured) + + embedding_count = 0 + for index in range(min(num_detections, self._top_k_embedding_count)): + bb_embedding, score = get_bb_embedding( + detection_features, detection_boxes, detection_scores, index) + embed_all.extend(bb_embedding) + embed_all.extend(temporal_embedding) + score_all.append(score) + embedding_count += 1 + + for index in range( + max(0, num_detections - 1), + max(-1, num_detections - 1 - self._bottom_k_embedding_count), -1): + bb_embedding, score = get_bb_embedding( + detection_features, detection_boxes, detection_scores, index) + embed_all.extend(bb_embedding) + embed_all.extend(temporal_embedding) + score_all.append(score) + embedding_count += 1 + + if embedding_count == 0: + bb_embedding, score = get_bb_embedding( + detection_features, detection_boxes, detection_scores, 0) + embed_all.extend(bb_embedding) + embed_all.extend(temporal_embedding) + score_all.append(score) + + # Takes max in case embedding_count is 0. + embedding_length = len(embed_all) // max(1, embedding_count) + + embed_all = np.asarray(embed_all) + + example.features.feature['image/embedding'].float_list.value.extend( + embed_all) + example.features.feature['image/embedding_score'].float_list.value.extend( + score_all) + example.features.feature['image/embedding_length'].int64_list.value.append( + embedding_length) + example.features.feature['image/embedding_count'].int64_list.value.append( + embedding_count) + + # Add other essential example attributes + example.features.feature['image/encoded'].bytes_list.value.extend( + input_example.features.feature['image/encoded'].bytes_list.value) + example.features.feature['image/height'].int64_list.value.extend( + input_example.features.feature['image/height'].int64_list.value) + example.features.feature['image/width'].int64_list.value.extend( + input_example.features.feature['image/width'].int64_list.value) + example.features.feature['image/source_id'].bytes_list.value.extend( + input_example.features.feature['image/source_id'].bytes_list.value) + example.features.feature['image/location'].bytes_list.value.extend( + input_example.features.feature['image/location'].bytes_list.value) + + example.features.feature['image/date_captured'].bytes_list.value.extend( + input_example.features.feature['image/date_captured'].bytes_list.value) + + example.features.feature['image/class/text'].bytes_list.value.extend( + input_example.features.feature['image/class/text'].bytes_list.value) + example.features.feature['image/class/label'].int64_list.value.extend( + input_example.features.feature['image/class/label'].int64_list.value) + + example.features.feature['image/seq_id'].bytes_list.value.extend( + input_example.features.feature['image/seq_id'].bytes_list.value) + example.features.feature['image/seq_num_frames'].int64_list.value.extend( + input_example.features.feature['image/seq_num_frames'].int64_list.value) + example.features.feature['image/seq_frame_num'].int64_list.value.extend( + input_example.features.feature['image/seq_frame_num'].int64_list.value) + + example.features.feature['image/object/bbox/ymax'].float_list.value.extend( + input_example.features.feature[ + 'image/object/bbox/ymax'].float_list.value) + 
example.features.feature['image/object/bbox/ymin'].float_list.value.extend( + input_example.features.feature[ + 'image/object/bbox/ymin'].float_list.value) + example.features.feature['image/object/bbox/xmax'].float_list.value.extend( + input_example.features.feature[ + 'image/object/bbox/xmax'].float_list.value) + example.features.feature['image/object/bbox/xmin'].float_list.value.extend( + input_example.features.feature[ + 'image/object/bbox/xmin'].float_list.value) + example.features.feature[ + 'image/object/class/score'].float_list.value.extend( + input_example.features.feature[ + 'image/object/class/score'].float_list.value) + example.features.feature[ + 'image/object/class/label'].int64_list.value.extend( + input_example.features.feature[ + 'image/object/class/label'].int64_list.value) + example.features.feature[ + 'image/object/class/text'].bytes_list.value.extend( + input_example.features.feature[ + 'image/object/class/text'].bytes_list.value) + + self._num_examples_processed.inc(1) + return [example] + + +def construct_pipeline(input_tfrecord, output_tfrecord, model_dir, + top_k_embedding_count, bottom_k_embedding_count, + num_shards): + """Returns a beam pipeline to run object detection inference. + + Args: + input_tfrecord: An TFRecord of tf.train.Example protos containing images. + output_tfrecord: An TFRecord of tf.train.Example protos that contain images + in the input TFRecord and the detections from the model. + model_dir: Path to `saved_model` to use for inference. + top_k_embedding_count: The number of high-confidence embeddings to store. + bottom_k_embedding_count: The number of low-confidence embeddings to store. + num_shards: The number of output shards. + """ + def pipeline(root): + input_collection = ( + root | 'ReadInputTFRecord' >> beam.io.tfrecordio.ReadFromTFRecord( + input_tfrecord, + coder=beam.coders.BytesCoder())) + output_collection = input_collection | 'ExtractEmbedding' >> beam.ParDo( + GenerateEmbeddingDataFn(model_dir, top_k_embedding_count, + bottom_k_embedding_count)) + output_collection = output_collection | 'Reshuffle' >> beam.Reshuffle() + _ = output_collection | 'WritetoDisk' >> beam.io.tfrecordio.WriteToTFRecord( + output_tfrecord, + num_shards=num_shards, + coder=beam.coders.ProtoCoder(tf.train.Example)) + return pipeline + + +def main(_): + """Runs the Beam pipeline that performs inference. + + Args: + _: unused + """ + # must create before flags are used + runner = runners.DirectRunner() + + dirname = os.path.dirname(FLAGS.embedding_output_tfrecord) + tf.io.gfile.makedirs(dirname) + runner.run( + construct_pipeline(FLAGS.embedding_input_tfrecord, + FLAGS.embedding_output_tfrecord, + FLAGS.embedding_model_dir, FLAGS.top_k_embedding_count, + FLAGS.bottom_k_embedding_count, FLAGS.num_shards)) + + +if __name__ == '__main__': + flags.mark_flags_as_required([ + 'embedding_input_tfrecord', + 'embedding_output_tfrecord', + 'embedding_model_dir' + ]) + app.run(main) diff --git a/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data_tf1_test.py b/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data_tf1_test.py new file mode 100644 index 000000000..064a57e13 --- /dev/null +++ b/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data_tf1_test.py @@ -0,0 +1,337 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Tests for generate_embedding_data.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +import contextlib +import os +import tempfile +import unittest +import numpy as np +import six +import tensorflow.compat.v1 as tf +from object_detection import exporter +from object_detection.builders import model_builder +from object_detection.core import model +from object_detection.dataset_tools.context_rcnn import generate_embedding_data +from object_detection.protos import pipeline_pb2 +from object_detection.utils import tf_version +from apache_beam import runners + +if six.PY2: + import mock # pylint: disable=g-import-not-at-top +else: + mock = unittest.mock + + +class FakeModel(model.DetectionModel): + """A Fake Detection model with expected output nodes from post-processing.""" + + def preprocess(self, inputs): + true_image_shapes = [] # Doesn't matter for the fake model. + return tf.identity(inputs), true_image_shapes + + def predict(self, preprocessed_inputs, true_image_shapes): + return {'image': tf.layers.conv2d(preprocessed_inputs, 3, 1)} + + def postprocess(self, prediction_dict, true_image_shapes): + with tf.control_dependencies(prediction_dict.values()): + num_features = 100 + feature_dims = 10 + classifier_feature = np.ones( + (2, feature_dims, feature_dims, num_features), + dtype=np.float32).tolist() + postprocessed_tensors = { + 'detection_boxes': tf.constant([[[0.0, 0.1, 0.5, 0.6], + [0.5, 0.5, 0.8, 0.8]]], tf.float32), + 'detection_scores': tf.constant([[0.95, 0.6]], tf.float32), + 'detection_multiclass_scores': tf.constant([[[0.1, 0.7, 0.2], + [0.3, 0.1, 0.6]]], + tf.float32), + 'detection_classes': tf.constant([[0, 1]], tf.float32), + 'num_detections': tf.constant([2], tf.float32), + 'detection_features': + tf.constant([classifier_feature], + tf.float32) + } + return postprocessed_tensors + + def restore_map(self, checkpoint_path, fine_tune_checkpoint_type): + pass + + def loss(self, prediction_dict, true_image_shapes): + pass + + def regularization_losses(self): + pass + + def updates(self): + pass + + +@contextlib.contextmanager +def InMemoryTFRecord(entries): + temp = tempfile.NamedTemporaryFile(delete=False) + filename = temp.name + try: + with tf.python_io.TFRecordWriter(filename) as writer: + for value in entries: + writer.write(value) + yield filename + finally: + os.unlink(temp.name) + + +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') +class GenerateEmbeddingData(tf.test.TestCase): + + def _save_checkpoint_from_mock_model(self, checkpoint_path): + """A function to save checkpoint from a fake Detection Model. + + Args: + checkpoint_path: Path to save checkpoint from Fake model. 
+ """ + g = tf.Graph() + with g.as_default(): + mock_model = FakeModel(num_classes=5) + preprocessed_inputs, true_image_shapes = mock_model.preprocess( + tf.placeholder(tf.float32, shape=[None, None, None, 3])) + predictions = mock_model.predict(preprocessed_inputs, true_image_shapes) + mock_model.postprocess(predictions, true_image_shapes) + tf.train.get_or_create_global_step() + saver = tf.train.Saver() + init = tf.global_variables_initializer() + with self.test_session(graph=g) as sess: + sess.run(init) + saver.save(sess, checkpoint_path) + + def _export_saved_model(self): + tmp_dir = self.get_temp_dir() + checkpoint_path = os.path.join(tmp_dir, 'model.ckpt') + self._save_checkpoint_from_mock_model(checkpoint_path) + output_directory = os.path.join(tmp_dir, 'output') + saved_model_path = os.path.join(output_directory, 'saved_model') + tf.io.gfile.makedirs(output_directory) + with mock.patch.object( + model_builder, 'build', autospec=True) as mock_builder: + mock_builder.return_value = FakeModel(num_classes=5) + pipeline_config = pipeline_pb2.TrainEvalPipelineConfig() + pipeline_config.eval_config.use_moving_averages = False + detection_model = model_builder.build(pipeline_config.model, + is_training=False) + outputs, placeholder_tensor = exporter.build_detection_graph( + input_type='tf_example', + detection_model=detection_model, + input_shape=None, + output_collection_name='inference_op', + graph_hook_fn=None) + output_node_names = ','.join(outputs.keys()) + saver = tf.train.Saver() + input_saver_def = saver.as_saver_def() + frozen_graph_def = exporter.freeze_graph_with_def_protos( + input_graph_def=tf.get_default_graph().as_graph_def(), + input_saver_def=input_saver_def, + input_checkpoint=checkpoint_path, + output_node_names=output_node_names, + restore_op_name='save/restore_all', + filename_tensor_name='save/Const:0', + output_graph='', + clear_devices=True, + initializer_nodes='') + exporter.write_saved_model( + saved_model_path=saved_model_path, + frozen_graph_def=frozen_graph_def, + inputs=placeholder_tensor, + outputs=outputs) + return saved_model_path + + def _create_tf_example(self): + with self.test_session(): + encoded_image = tf.image.encode_jpeg( + tf.constant(np.ones((4, 4, 3)).astype(np.uint8))).eval() + + def BytesFeature(value): + return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value])) + + def Int64Feature(value): + return tf.train.Feature(int64_list=tf.train.Int64List(value=[value])) + + def FloatFeature(value): + return tf.train.Feature(float_list=tf.train.FloatList(value=[value])) + + example = tf.train.Example( + features=tf.train.Features( + feature={ + 'image/encoded': BytesFeature(encoded_image), + 'image/source_id': BytesFeature(b'image_id'), + 'image/height': Int64Feature(400), + 'image/width': Int64Feature(600), + 'image/class/label': Int64Feature(5), + 'image/class/text': BytesFeature(b'hyena'), + 'image/object/bbox/xmin': FloatFeature(0.1), + 'image/object/bbox/xmax': FloatFeature(0.6), + 'image/object/bbox/ymin': FloatFeature(0.0), + 'image/object/bbox/ymax': FloatFeature(0.5), + 'image/object/class/score': FloatFeature(0.95), + 'image/object/class/label': Int64Feature(5), + 'image/object/class/text': BytesFeature(b'hyena'), + 'image/date_captured': BytesFeature(b'2019-10-20 12:12:12') + })) + + return example.SerializeToString() + + def assert_expected_example(self, example, topk=False, botk=False): + # Check embeddings + if topk or botk: + self.assertEqual(len( + example.features.feature['image/embedding'].float_list.value), + 218) + 
self.assertAllEqual( + example.features.feature['image/embedding_count'].int64_list.value, + [2]) + else: + self.assertEqual(len( + example.features.feature['image/embedding'].float_list.value), + 109) + self.assertAllEqual( + example.features.feature['image/embedding_count'].int64_list.value, + [1]) + + self.assertAllEqual( + example.features.feature['image/embedding_length'].int64_list.value, + [109]) + + # Check annotations + self.assertAllClose( + example.features.feature['image/object/bbox/ymin'].float_list.value, + [0.0]) + self.assertAllClose( + example.features.feature['image/object/bbox/xmin'].float_list.value, + [0.1]) + self.assertAllClose( + example.features.feature['image/object/bbox/ymax'].float_list.value, + [0.5]) + self.assertAllClose( + example.features.feature['image/object/bbox/xmax'].float_list.value, + [0.6]) + self.assertAllClose( + example.features.feature['image/object/class/score'] + .float_list.value, [0.95]) + self.assertAllClose( + example.features.feature['image/object/class/label'] + .int64_list.value, [5]) + self.assertAllEqual( + example.features.feature['image/object/class/text'] + .bytes_list.value, ['hyena']) + self.assertAllClose( + example.features.feature['image/class/label'] + .int64_list.value, [5]) + self.assertAllEqual( + example.features.feature['image/class/text'] + .bytes_list.value, ['hyena']) + + # Check other essential attributes. + self.assertAllEqual( + example.features.feature['image/height'].int64_list.value, [400]) + self.assertAllEqual( + example.features.feature['image/width'].int64_list.value, [600]) + self.assertAllEqual( + example.features.feature['image/source_id'].bytes_list.value, + ['image_id']) + self.assertTrue( + example.features.feature['image/encoded'].bytes_list.value) + + def test_generate_embedding_data_fn(self): + saved_model_path = self._export_saved_model() + top_k_embedding_count = 1 + bottom_k_embedding_count = 0 + inference_fn = generate_embedding_data.GenerateEmbeddingDataFn( + saved_model_path, top_k_embedding_count, bottom_k_embedding_count) + inference_fn.start_bundle() + generated_example = self._create_tf_example() + self.assertAllEqual(tf.train.Example.FromString( + generated_example).features.feature['image/object/class/label'] + .int64_list.value, [5]) + self.assertAllEqual(tf.train.Example.FromString( + generated_example).features.feature['image/object/class/text'] + .bytes_list.value, ['hyena']) + output = inference_fn.process(generated_example) + output_example = output[0] + self.assert_expected_example(output_example) + + def test_generate_embedding_data_with_top_k_boxes(self): + saved_model_path = self._export_saved_model() + top_k_embedding_count = 2 + bottom_k_embedding_count = 0 + inference_fn = generate_embedding_data.GenerateEmbeddingDataFn( + saved_model_path, top_k_embedding_count, bottom_k_embedding_count) + inference_fn.start_bundle() + generated_example = self._create_tf_example() + self.assertAllEqual( + tf.train.Example.FromString(generated_example).features + .feature['image/object/class/label'].int64_list.value, [5]) + self.assertAllEqual( + tf.train.Example.FromString(generated_example).features + .feature['image/object/class/text'].bytes_list.value, [b'hyena']) + output = inference_fn.process(generated_example) + output_example = output[0] + self.assert_expected_example(output_example, topk=True) + + def test_generate_embedding_data_with_bottom_k_boxes(self): + saved_model_path = self._export_saved_model() + top_k_embedding_count = 0 + bottom_k_embedding_count = 2 + inference_fn = 
generate_embedding_data.GenerateEmbeddingDataFn( + saved_model_path, top_k_embedding_count, bottom_k_embedding_count) + inference_fn.start_bundle() + generated_example = self._create_tf_example() + self.assertAllEqual( + tf.train.Example.FromString(generated_example).features + .feature['image/object/class/label'].int64_list.value, [5]) + self.assertAllEqual( + tf.train.Example.FromString(generated_example).features + .feature['image/object/class/text'].bytes_list.value, ['hyena']) + output = inference_fn.process(generated_example) + output_example = output[0] + self.assert_expected_example(output_example, botk=True) + + def test_beam_pipeline(self): + with InMemoryTFRecord([self._create_tf_example()]) as input_tfrecord: + runner = runners.DirectRunner() + temp_dir = tempfile.mkdtemp(dir=os.environ.get('TEST_TMPDIR')) + output_tfrecord = os.path.join(temp_dir, 'output_tfrecord') + saved_model_path = self._export_saved_model() + top_k_embedding_count = 1 + bottom_k_embedding_count = 0 + num_shards = 1 + pipeline = generate_embedding_data.construct_pipeline( + input_tfrecord, output_tfrecord, saved_model_path, + top_k_embedding_count, bottom_k_embedding_count, num_shards) + runner.run(pipeline) + filenames = tf.io.gfile.glob( + output_tfrecord + '-?????-of-?????') + actual_output = [] + record_iterator = tf.python_io.tf_record_iterator(path=filenames[0]) + for record in record_iterator: + actual_output.append(record) + self.assertEqual(len(actual_output), 1) + self.assert_expected_example(tf.train.Example.FromString( + actual_output[0])) + + +if __name__ == '__main__': + tf.test.main() diff --git a/research/object_detection/dataset_tools/seq_example_util_test.py b/research/object_detection/dataset_tools/seq_example_util_test.py index 81fd4f54f..ba898d735 100644 --- a/research/object_detection/dataset_tools/seq_example_util_test.py +++ b/research/object_detection/dataset_tools/seq_example_util_test.py @@ -24,10 +24,18 @@ import six import tensorflow.compat.v1 as tf from object_detection.dataset_tools import seq_example_util +from object_detection.utils import tf_version class SeqExampleUtilTest(tf.test.TestCase): + def materialize_tensors(self, list_of_tensors): + if tf_version.is_tf2(): + return [tensor.numpy() for tensor in list_of_tensors] + else: + with self.cached_session() as sess: + return sess.run(list_of_tensors) + def test_make_unlabeled_example(self): num_frames = 5 image_height = 100 @@ -41,8 +49,7 @@ class SeqExampleUtilTest(tf.test.TestCase): image_source_ids = [str(idx) for idx in range(num_frames)] images_list = tf.unstack(images, axis=0) encoded_images_list = [tf.io.encode_jpeg(image) for image in images_list] - with tf.Session() as sess: - encoded_images = sess.run(encoded_images_list) + encoded_images = self.materialize_tensors(encoded_images_list) seq_example = seq_example_util.make_sequence_example( dataset_name=dataset_name, video_id=video_id, @@ -109,8 +116,7 @@ class SeqExampleUtilTest(tf.test.TestCase): dtype=tf.int32), dtype=tf.uint8) images_list = tf.unstack(images, axis=0) encoded_images_list = [tf.io.encode_jpeg(image) for image in images_list] - with tf.Session() as sess: - encoded_images = sess.run(encoded_images_list) + encoded_images = self.materialize_tensors(encoded_images_list) timestamps = [100000, 110000] is_annotated = [1, 0] bboxes = [ @@ -208,8 +214,7 @@ class SeqExampleUtilTest(tf.test.TestCase): dtype=tf.int32), dtype=tf.uint8) images_list = tf.unstack(images, axis=0) encoded_images_list = [tf.io.encode_jpeg(image) for image in images_list] - with 
tf.Session() as sess: - encoded_images = sess.run(encoded_images_list) + encoded_images = self.materialize_tensors(encoded_images_list) bboxes = [ np.array([[0., 0., 0.75, 0.75], [0., 0., 1., 1.]], dtype=np.float32), diff --git a/research/object_detection/eval_util.py b/research/object_detection/eval_util.py index e2d1255b5..3b365df19 100644 --- a/research/object_detection/eval_util.py +++ b/research/object_detection/eval_util.py @@ -52,6 +52,8 @@ EVAL_METRICS_CLASS_DICT = { coco_evaluation.CocoKeypointEvaluator, 'coco_mask_metrics': coco_evaluation.CocoMaskEvaluator, + 'coco_panoptic_metrics': + coco_evaluation.CocoPanopticSegmentationEvaluator, 'oid_challenge_detection_metrics': object_detection_evaluation.OpenImagesDetectionChallengeEvaluator, 'oid_challenge_segmentation_metrics': diff --git a/research/object_detection/eval_util_test.py b/research/object_detection/eval_util_test.py index f2f66405f..d0623f1fc 100644 --- a/research/object_detection/eval_util_test.py +++ b/research/object_detection/eval_util_test.py @@ -18,6 +18,7 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function +import unittest from absl.testing import parameterized import numpy as np @@ -30,6 +31,7 @@ from object_detection.core import standard_fields as fields from object_detection.metrics import coco_evaluation from object_detection.protos import eval_pb2 from object_detection.utils import test_case +from object_detection.utils import tf_version class EvalUtilTest(test_case.TestCase, parameterized.TestCase): @@ -127,6 +129,7 @@ class EvalUtilTest(test_case.TestCase, parameterized.TestCase): {'batch_size': 1, 'max_gt_boxes': None, 'scale_to_absolute': False}, {'batch_size': 8, 'max_gt_boxes': [1], 'scale_to_absolute': False} ) + @unittest.skipIf(tf_version.is_tf2(), 'Only compatible with TF1.X') def test_get_eval_metric_ops_for_coco_detections(self, batch_size=1, max_gt_boxes=None, scale_to_absolute=False): @@ -155,6 +158,7 @@ class EvalUtilTest(test_case.TestCase, parameterized.TestCase): {'batch_size': 1, 'max_gt_boxes': None, 'scale_to_absolute': False}, {'batch_size': 8, 'max_gt_boxes': [1], 'scale_to_absolute': False} ) + @unittest.skipIf(tf_version.is_tf2(), 'Only compatible with TF1.X') def test_get_eval_metric_ops_for_coco_detections_and_masks( self, batch_size=1, max_gt_boxes=None, scale_to_absolute=False): eval_config = eval_pb2.EvalConfig() @@ -185,6 +189,7 @@ class EvalUtilTest(test_case.TestCase, parameterized.TestCase): {'batch_size': 1, 'max_gt_boxes': None, 'scale_to_absolute': False}, {'batch_size': 8, 'max_gt_boxes': [1], 'scale_to_absolute': False} ) + @unittest.skipIf(tf_version.is_tf2(), 'Only compatible with TF1.X') def test_get_eval_metric_ops_for_coco_detections_and_resized_masks( self, batch_size=1, max_gt_boxes=None, scale_to_absolute=False): eval_config = eval_pb2.EvalConfig() @@ -210,6 +215,7 @@ class EvalUtilTest(test_case.TestCase, parameterized.TestCase): self.assertAlmostEqual(1.0, metrics['DetectionBoxes_Precision/mAP']) self.assertAlmostEqual(1.0, metrics['DetectionMasks_Precision/mAP']) + @unittest.skipIf(tf_version.is_tf2(), 'Only compatible with TF1.X') def test_get_eval_metric_ops_raises_error_with_unsupported_metric(self): eval_config = eval_pb2.EvalConfig() eval_config.metrics_set.extend(['unsupported_metric']) @@ -334,63 +340,67 @@ class EvalUtilTest(test_case.TestCase, parameterized.TestCase): dtype=np.float32) detection_keypoints = np.array([[0.0, 0.0], [0.5, 0.5], [1.0, 1.0]], dtype=np.float32) - detections = { - 
detection_fields.detection_boxes: - tf.constant(detection_boxes), - detection_fields.detection_scores: - tf.constant([[1.], [1.]]), - detection_fields.detection_classes: - tf.constant([[1], [2]]), - detection_fields.num_detections: - tf.constant([1, 1]), - detection_fields.detection_keypoints: - tf.tile( - tf.reshape( - tf.constant(detection_keypoints), shape=[1, 1, 3, 2]), - multiples=[2, 1, 1, 1]) - } - - gt_boxes = detection_boxes - groundtruth = { - input_data_fields.groundtruth_boxes: - tf.constant(gt_boxes), - input_data_fields.groundtruth_classes: - tf.constant([[1.], [1.]]), - input_data_fields.groundtruth_keypoints: - tf.tile( - tf.reshape( - tf.constant(detection_keypoints), shape=[1, 1, 3, 2]), - multiples=[2, 1, 1, 1]) - } - - image = tf.zeros((2, 100, 100, 3), dtype=tf.float32) - - true_image_shapes = tf.constant([[100, 100, 3], [50, 100, 3]]) - original_image_spatial_shapes = tf.constant([[200, 200], [150, 300]]) - - result = eval_util.result_dict_for_batched_example( - image, key, detections, groundtruth, - scale_to_absolute=True, - true_image_shapes=true_image_shapes, - original_image_spatial_shapes=original_image_spatial_shapes, - max_gt_boxes=tf.constant(1)) - - with self.test_session() as sess: - result = sess.run(result) - self.assertAllEqual( - [[[0., 0., 200., 200.]], [[0.0, 0.0, 150., 150.]]], - result[input_data_fields.groundtruth_boxes]) - self.assertAllClose([[[[0., 0.], [100., 100.], [200., 200.]]], - [[[0., 0.], [150., 150.], [300., 300.]]]], - result[input_data_fields.groundtruth_keypoints]) - - # Predictions from the model are not scaled. - self.assertAllEqual( - [[[0., 0., 200., 200.]], [[0.0, 0.0, 75., 150.]]], - result[detection_fields.detection_boxes]) - self.assertAllClose([[[[0., 0.], [100., 100.], [200., 200.]]], - [[[0., 0.], [75., 150.], [150., 300.]]]], - result[detection_fields.detection_keypoints]) + def graph_fn(): + detections = { + detection_fields.detection_boxes: + tf.constant(detection_boxes), + detection_fields.detection_scores: + tf.constant([[1.], [1.]]), + detection_fields.detection_classes: + tf.constant([[1], [2]]), + detection_fields.num_detections: + tf.constant([1, 1]), + detection_fields.detection_keypoints: + tf.tile( + tf.reshape( + tf.constant(detection_keypoints), shape=[1, 1, 3, 2]), + multiples=[2, 1, 1, 1]) + } + + gt_boxes = detection_boxes + groundtruth = { + input_data_fields.groundtruth_boxes: + tf.constant(gt_boxes), + input_data_fields.groundtruth_classes: + tf.constant([[1.], [1.]]), + input_data_fields.groundtruth_keypoints: + tf.tile( + tf.reshape( + tf.constant(detection_keypoints), shape=[1, 1, 3, 2]), + multiples=[2, 1, 1, 1]) + } + + image = tf.zeros((2, 100, 100, 3), dtype=tf.float32) + + true_image_shapes = tf.constant([[100, 100, 3], [50, 100, 3]]) + original_image_spatial_shapes = tf.constant([[200, 200], [150, 300]]) + + result = eval_util.result_dict_for_batched_example( + image, key, detections, groundtruth, + scale_to_absolute=True, + true_image_shapes=true_image_shapes, + original_image_spatial_shapes=original_image_spatial_shapes, + max_gt_boxes=tf.constant(1)) + return (result[input_data_fields.groundtruth_boxes], + result[input_data_fields.groundtruth_keypoints], + result[detection_fields.detection_boxes], + result[detection_fields.detection_keypoints]) + (gt_boxes, gt_keypoints, detection_boxes, + detection_keypoints) = self.execute_cpu(graph_fn, []) + self.assertAllEqual( + [[[0., 0., 200., 200.]], [[0.0, 0.0, 150., 150.]]], + gt_boxes) + self.assertAllClose([[[[0., 0.], [100., 100.], [200., 
200.]]], + [[[0., 0.], [150., 150.], [300., 300.]]]], + gt_keypoints) + + # Predictions from the model are not scaled. + self.assertAllEqual( + [[[0., 0., 200., 200.]], [[0.0, 0.0, 75., 150.]]], + detection_boxes) + self.assertAllClose([[[[0., 0.], [100., 100.], [200., 200.]]], + [[[0., 0.], [75., 150.], [150., 300.]]]], + detection_keypoints) if __name__ == '__main__': diff --git a/research/object_detection/export_inference_graph.py b/research/object_detection/export_inference_graph.py index bcb5c40b3..5a0ee0dde 100644 --- a/research/object_detection/export_inference_graph.py +++ b/research/object_detection/export_inference_graph.py @@ -134,6 +134,30 @@ flags.DEFINE_string('config_override', '', 'text proto to override pipeline_config_path.') flags.DEFINE_boolean('write_inference_graph', False, 'If true, writes inference graph to disk.') +flags.DEFINE_string('additional_output_tensor_names', None, + 'Additional Tensors to output, to be specified as a comma ' + 'separated list of tensor names.') +flags.DEFINE_boolean('use_side_inputs', False, + 'If True, uses side inputs as well as image inputs.') +flags.DEFINE_string('side_input_shapes', None, + 'If use_side_inputs is True, this explicitly sets ' + 'the shape of the side input tensors to a fixed size. The ' + 'dimensions are to be provided as a comma-separated list ' + 'of integers. A value of -1 can be used for unknown ' + 'dimensions. A `/` denotes a break, starting the shape of ' + 'the next side input tensor. This flag is required if ' + 'using side inputs.') +flags.DEFINE_string('side_input_types', None, + 'If use_side_inputs is True, this explicitly sets ' + 'the type of the side input tensors. The ' + 'dimensions are to be provided as a comma-separated list ' + 'of types, each of `string`, `integer`, or `float`. ' + 'This flag is required if using side inputs.') +flags.DEFINE_string('side_input_names', None, + 'If use_side_inputs is True, this explicitly sets ' + 'the names of the side input tensors required by the model ' + 'assuming the names will be a comma-separated list of ' + 'strings. 
This flag is required if using side inputs.') tf.app.flags.mark_flag_as_required('pipeline_config_path') tf.app.flags.mark_flag_as_required('trained_checkpoint_prefix') tf.app.flags.mark_flag_as_required('output_directory') @@ -152,10 +176,30 @@ def main(_): ] else: input_shape = None + if FLAGS.use_side_inputs: + side_input_shapes, side_input_names, side_input_types = ( + exporter.parse_side_inputs( + FLAGS.side_input_shapes, + FLAGS.side_input_names, + FLAGS.side_input_types)) + else: + side_input_shapes = None + side_input_names = None + side_input_types = None + if FLAGS.additional_output_tensor_names: + additional_output_tensor_names = list( + FLAGS.additional_output_tensor_names.split(',')) + else: + additional_output_tensor_names = None exporter.export_inference_graph( FLAGS.input_type, pipeline_config, FLAGS.trained_checkpoint_prefix, FLAGS.output_directory, input_shape=input_shape, - write_inference_graph=FLAGS.write_inference_graph) + write_inference_graph=FLAGS.write_inference_graph, + additional_output_tensor_names=additional_output_tensor_names, + use_side_inputs=FLAGS.use_side_inputs, + side_input_shapes=side_input_shapes, + side_input_names=side_input_names, + side_input_types=side_input_types) if __name__ == '__main__': diff --git a/research/object_detection/export_tflite_ssd_graph_lib.py b/research/object_detection/export_tflite_ssd_graph_lib.py index 229daab00..f72e9525b 100644 --- a/research/object_detection/export_tflite_ssd_graph_lib.py +++ b/research/object_detection/export_tflite_ssd_graph_lib.py @@ -24,16 +24,19 @@ import tensorflow.compat.v1 as tf from tensorflow.core.framework import attr_value_pb2 from tensorflow.core.framework import types_pb2 from tensorflow.core.protobuf import saver_pb2 -from tensorflow.tools.graph_transforms import TransformGraph from object_detection import exporter from object_detection.builders import graph_rewriter_builder from object_detection.builders import model_builder from object_detection.builders import post_processing_builder from object_detection.core import box_list +from object_detection.utils import tf_version _DEFAULT_NUM_CHANNELS = 3 _DEFAULT_NUM_COORD_BOX = 4 +if tf_version.is_tf1(): + from tensorflow.tools.graph_transforms import TransformGraph # pylint: disable=g-import-not-at-top + def get_const_center_size_encoded_anchors(anchors): """Exports center-size encoded anchors as a constant tensor. 
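The side-input flags added to export_inference_graph.py above compose with the
existing export command. A hypothetical invocation is sketched below; the side
input names, shapes, and types are illustrative only and must match the side
inputs the model's predict() method actually accepts:

python object_detection/export_inference_graph.py \
    --alsologtostderr \
    --input_type image_tensor \
    --pipeline_config_path path/to/faster_rcnn_model.config \
    --trained_checkpoint_prefix path/to/model.ckpt \
    --output_directory path/to/exported_model_directory \
    --use_side_inputs=True \
    --side_input_shapes 1,128/1 \
    --side_input_names context_features,valid_context_size \
    --side_input_types float,int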
diff --git a/research/object_detection/export_tflite_ssd_graph_lib_test.py b/research/object_detection/export_tflite_ssd_graph_lib_tf1_test.py similarity index 99% rename from research/object_detection/export_tflite_ssd_graph_lib_test.py rename to research/object_detection/export_tflite_ssd_graph_lib_tf1_test.py index 5b6082d10..0da7b9aa2 100644 --- a/research/object_detection/export_tflite_ssd_graph_lib_test.py +++ b/research/object_detection/export_tflite_ssd_graph_lib_tf1_test.py @@ -18,6 +18,7 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function import os +import unittest import numpy as np import six import tensorflow.compat.v1 as tf @@ -32,6 +33,7 @@ from object_detection.core import model from object_detection.protos import graph_rewriter_pb2 from object_detection.protos import pipeline_pb2 from object_detection.protos import post_processing_pb2 +from object_detection.utils import tf_version # pylint: disable=g-import-not-at-top @@ -82,6 +84,7 @@ class FakeModel(model.DetectionModel): pass +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class ExportTfliteGraphTest(tf.test.TestCase): def _save_checkpoint_from_mock_model(self, diff --git a/research/object_detection/exporter.py b/research/object_detection/exporter.py index 676e34deb..61c5f7f22 100644 --- a/research/object_detection/exporter.py +++ b/research/object_detection/exporter.py @@ -39,6 +39,54 @@ except ImportError: freeze_graph_with_def_protos = freeze_graph.freeze_graph_with_def_protos +def parse_side_inputs(side_input_shapes_string, side_input_names_string, + side_input_types_string): + """Parses side input flags. + + Args: + side_input_shapes_string: The shape of the side input tensors, provided as a + comma-separated list of integers. A value of -1 is used for unknown + dimensions. A `/` denotes a break, starting the shape of the next side + input tensor. + side_input_names_string: The names of the side input tensors, provided as a + comma-separated list of strings. + side_input_types_string: The type of the side input tensors, provided as a + comma-separated list of types, each of `string`, `integer`, or `float`. + + Returns: + side_input_shapes: A list of shapes. + side_input_names: A list of strings. + side_input_types: A list of tensorflow dtypes. + + """ + if side_input_shapes_string: + side_input_shapes = [] + for side_input_shape_list in side_input_shapes_string.split('/'): + side_input_shape = [ + int(dim) if dim != '-1' else None + for dim in side_input_shape_list.split(',') + ] + side_input_shapes.append(side_input_shape) + else: + raise ValueError('When using side_inputs, side_input_shapes must be ' + 'specified in the input flags.') + if side_input_names_string: + side_input_names = list(side_input_names_string.split(',')) + else: + raise ValueError('When using side_inputs, side_input_names must be ' + 'specified in the input flags.') + if side_input_types_string: + typelookup = {'float': tf.float32, 'int': tf.int32, 'string': tf.string} + side_input_types = [ + typelookup[side_input_type] + for side_input_type in side_input_types_string.split(',') + ] + else: + raise ValueError('When using side_inputs, side_input_types must be ' + 'specified in the input flags.') + return side_input_shapes, side_input_names, side_input_types + + def rewrite_nn_resize_op(is_quantized=False): """Replaces a custom nearest-neighbor resize op with the Tensorflow version. 
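To make the flag format concrete, here is a minimal sketch (not part of the
patch) of how parse_side_inputs turns the three flag strings into the values
consumed downstream; the side input names below are hypothetical. A `-1`
dimension becomes None, i.e. an unspecified placeholder dimension. Note also
that the flag help and the docstring say `integer`, while the typelookup table
only accepts the literal `int`.

```
import tensorflow.compat.v1 as tf
from object_detection import exporter

shapes, names, types = exporter.parse_side_inputs(
    side_input_shapes_string='1,128/-1',
    side_input_names_string='context_features,valid_context_size',
    side_input_types_string='float,int')
# shapes == [[1, 128], [None]]
# names  == ['context_features', 'valid_context_size']
# types  == [tf.float32, tf.int32]
```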
@@ -140,6 +188,14 @@ def _image_tensor_input_placeholder(input_shape=None): return input_tensor, input_tensor +def _side_input_tensor_placeholder(side_input_shape, side_input_name, + side_input_type): + """Returns side input placeholder and side input tensor.""" + side_input_tensor = tf.placeholder( + dtype=side_input_type, shape=side_input_shape, name=side_input_name) + return side_input_tensor, side_input_tensor + + def _tf_example_input_placeholder(input_shape=None): """Returns input that accepts a batch of strings with tf examples. @@ -200,7 +256,7 @@ input_placeholder_fn_map = { 'image_tensor': _image_tensor_input_placeholder, 'encoded_image_string_tensor': _encoded_image_string_tensor_input_placeholder, - 'tf_example': _tf_example_input_placeholder, + 'tf_example': _tf_example_input_placeholder } @@ -312,7 +368,7 @@ def write_saved_model(saved_model_path, Args: saved_model_path: Path to write SavedModel. frozen_graph_def: tf.GraphDef holding frozen graph. - inputs: The input placeholder tensor. + inputs: A tensor dictionary containing the inputs to a DetectionModel. outputs: A tensor dictionary containing the outputs of a DetectionModel. """ with tf.Graph().as_default(): @@ -322,8 +378,13 @@ def write_saved_model(saved_model_path, builder = tf.saved_model.builder.SavedModelBuilder(saved_model_path) - tensor_info_inputs = { - 'inputs': tf.saved_model.utils.build_tensor_info(inputs)} + tensor_info_inputs = {} + if isinstance(inputs, dict): + for k, v in inputs.items(): + tensor_info_inputs[k] = tf.saved_model.utils.build_tensor_info(v) + else: + tensor_info_inputs['inputs'] = tf.saved_model.utils.build_tensor_info( + inputs) tensor_info_outputs = {} for k, v in outputs.items(): tensor_info_outputs[k] = tf.saved_model.utils.build_tensor_info(v) @@ -364,11 +425,11 @@ def write_graph_and_checkpoint(inference_graph_def, def _get_outputs_from_inputs(input_tensors, detection_model, - output_collection_name): + output_collection_name, **side_inputs): inputs = tf.cast(input_tensors, dtype=tf.float32) preprocessed_inputs, true_image_shapes = detection_model.preprocess(inputs) output_tensors = detection_model.predict( - preprocessed_inputs, true_image_shapes) + preprocessed_inputs, true_image_shapes, **side_inputs) postprocessed_tensors = detection_model.postprocess( output_tensors, true_image_shapes) return add_output_tensor_nodes(postprocessed_tensors, @@ -376,32 +437,45 @@ def _get_outputs_from_inputs(input_tensors, detection_model, def build_detection_graph(input_type, detection_model, input_shape, - output_collection_name, graph_hook_fn): + output_collection_name, graph_hook_fn, + use_side_inputs=False, side_input_shapes=None, + side_input_names=None, side_input_types=None): """Build the detection graph.""" if input_type not in input_placeholder_fn_map: raise ValueError('Unknown input type: {}'.format(input_type)) placeholder_args = {} + side_inputs = {} if input_shape is not None: if (input_type != 'image_tensor' and input_type != 'encoded_image_string_tensor' and - input_type != 'tf_example'): + input_type != 'tf_example' and + input_type != 'tf_sequence_example'): raise ValueError('Can only specify input shape for `image_tensor`, ' - '`encoded_image_string_tensor`, or `tf_example` ' - 'inputs.') + '`encoded_image_string_tensor`, `tf_example`, ' + ' or `tf_sequence_example` inputs.') placeholder_args['input_shape'] = input_shape placeholder_tensor, input_tensors = input_placeholder_fn_map[input_type]( **placeholder_args) + placeholder_tensors = {'inputs': placeholder_tensor} + if 
use_side_inputs: + for idx, side_input_name in enumerate(side_input_names): + side_input_placeholder, side_input = _side_input_tensor_placeholder( + side_input_shapes[idx], side_input_name, side_input_types[idx]) + print(side_input) + side_inputs[side_input_name] = side_input + placeholder_tensors[side_input_name] = side_input_placeholder outputs = _get_outputs_from_inputs( input_tensors=input_tensors, detection_model=detection_model, - output_collection_name=output_collection_name) + output_collection_name=output_collection_name, + **side_inputs) # Add global step to the graph. slim.get_or_create_global_step() if graph_hook_fn: graph_hook_fn() - return outputs, placeholder_tensor + return outputs, placeholder_tensors def _export_inference_graph(input_type, @@ -414,7 +488,11 @@ def _export_inference_graph(input_type, output_collection_name='inference_op', graph_hook_fn=None, write_inference_graph=False, - temp_checkpoint_prefix=''): + temp_checkpoint_prefix='', + use_side_inputs=False, + side_input_shapes=None, + side_input_names=None, + side_input_types=None): """Export helper.""" tf.gfile.MakeDirs(output_directory) frozen_graph_path = os.path.join(output_directory, @@ -422,12 +500,16 @@ def _export_inference_graph(input_type, saved_model_path = os.path.join(output_directory, 'saved_model') model_path = os.path.join(output_directory, 'model.ckpt') - outputs, placeholder_tensor = build_detection_graph( + outputs, placeholder_tensor_dict = build_detection_graph( input_type=input_type, detection_model=detection_model, input_shape=input_shape, output_collection_name=output_collection_name, - graph_hook_fn=graph_hook_fn) + graph_hook_fn=graph_hook_fn, + use_side_inputs=use_side_inputs, + side_input_shapes=side_input_shapes, + side_input_names=side_input_names, + side_input_types=side_input_types) profile_inference_graph(tf.get_default_graph()) saver_kwargs = {} @@ -464,7 +546,8 @@ def _export_inference_graph(input_type, f.write(str(inference_graph_def)) if additional_output_tensor_names is not None: - output_node_names = ','.join(outputs.keys()+additional_output_tensor_names) + output_node_names = ','.join(list(outputs.keys())+( + additional_output_tensor_names)) else: output_node_names = ','.join(outputs.keys()) @@ -480,7 +563,7 @@ def _export_inference_graph(input_type, initializer_nodes='') write_saved_model(saved_model_path, frozen_graph_def, - placeholder_tensor, outputs) + placeholder_tensor_dict, outputs) def export_inference_graph(input_type, @@ -490,7 +573,11 @@ def export_inference_graph(input_type, input_shape=None, output_collection_name='inference_op', additional_output_tensor_names=None, - write_inference_graph=False): + write_inference_graph=False, + use_side_inputs=False, + side_input_shapes=None, + side_input_names=None, + side_input_types=None): """Exports inference graph for the model specified in the pipeline config. Args: @@ -506,6 +593,13 @@ def export_inference_graph(input_type, additional_output_tensor_names: list of additional output tensors to include in the frozen graph. write_inference_graph: If true, writes inference graph to disk. + use_side_inputs: If True, the model requires side_inputs. + side_input_shapes: List of shapes of the side input tensors, + required if use_side_inputs is True. + side_input_names: List of names of the side input tensors, + required if use_side_inputs is True. + side_input_types: List of types of the side input tensors, + required if use_side_inputs is True. 
""" detection_model = model_builder.build(pipeline_config.model, is_training=False) @@ -524,7 +618,11 @@ def export_inference_graph(input_type, input_shape, output_collection_name, graph_hook_fn=graph_rewriter_fn, - write_inference_graph=write_inference_graph) + write_inference_graph=write_inference_graph, + use_side_inputs=use_side_inputs, + side_input_shapes=side_input_shapes, + side_input_names=side_input_names, + side_input_types=side_input_types) pipeline_config.eval_config.use_moving_averages = False config_util.save_pipeline_config(pipeline_config, output_directory) diff --git a/research/object_detection/exporter_lib_tf2_test.py b/research/object_detection/exporter_lib_tf2_test.py new file mode 100644 index 000000000..d30d80cb0 --- /dev/null +++ b/research/object_detection/exporter_lib_tf2_test.py @@ -0,0 +1,237 @@ +# Lint as: python2, python3 +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +"""Test for exporter_lib_v2.py.""" + +from __future__ import division +import io +import os +import unittest +from absl.testing import parameterized +import numpy as np +from PIL import Image +import six + +import tensorflow.compat.v2 as tf + +from object_detection import exporter_lib_v2 +from object_detection.builders import model_builder +from object_detection.core import model +from object_detection.core import standard_fields as fields +from object_detection.protos import pipeline_pb2 +from object_detection.utils import dataset_util +from object_detection.utils import tf_version + +if six.PY2: + import mock # pylint: disable=g-importing-member,g-import-not-at-top +else: + from unittest import mock # pylint: disable=g-importing-member,g-import-not-at-top + + +class FakeModel(model.DetectionModel): + + def __init__(self, conv_weight_scalar=1.0): + super(FakeModel, self).__init__(num_classes=2) + self._conv = tf.keras.layers.Conv2D( + filters=1, kernel_size=1, strides=(1, 1), padding='valid', + kernel_initializer=tf.keras.initializers.Constant( + value=conv_weight_scalar)) + + def preprocess(self, inputs): + true_image_shapes = [] # Doesn't matter for the fake model. 
+ return tf.identity(inputs), true_image_shapes + + def predict(self, preprocessed_inputs, true_image_shapes): + return {'image': self._conv(preprocessed_inputs)} + + def postprocess(self, prediction_dict, true_image_shapes): + predict_tensor_sum = tf.reduce_sum(prediction_dict['image']) + with tf.control_dependencies(list(prediction_dict.values())): + postprocessed_tensors = { + 'detection_boxes': tf.constant([[[0.0, 0.0, 0.5, 0.5], + [0.5, 0.5, 0.8, 0.8]], + [[0.5, 0.5, 1.0, 1.0], + [0.0, 0.0, 0.0, 0.0]]], tf.float32), + 'detection_scores': predict_tensor_sum + tf.constant( + [[0.7, 0.6], [0.9, 0.0]], tf.float32), + 'detection_classes': tf.constant([[0, 1], + [1, 0]], tf.float32), + 'num_detections': tf.constant([2, 1], tf.float32), + } + return postprocessed_tensors + + def restore_map(self, checkpoint_path, fine_tune_checkpoint_type): + pass + + def loss(self, prediction_dict, true_image_shapes): + pass + + def regularization_losses(self): + pass + + def updates(self): + pass + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class ExportInferenceGraphTest(tf.test.TestCase, parameterized.TestCase): + + def _save_checkpoint_from_mock_model( + self, checkpoint_dir, conv_weight_scalar=6.0): + mock_model = FakeModel(conv_weight_scalar) + fake_image = tf.zeros(shape=[1, 10, 10, 3], dtype=tf.float32) + preprocessed_inputs, true_image_shapes = mock_model.preprocess(fake_image) + predictions = mock_model.predict(preprocessed_inputs, true_image_shapes) + mock_model.postprocess(predictions, true_image_shapes) + + ckpt = tf.train.Checkpoint(model=mock_model) + exported_checkpoint_manager = tf.train.CheckpointManager( + ckpt, checkpoint_dir, max_to_keep=1) + exported_checkpoint_manager.save(checkpoint_number=0) + + @parameterized.parameters( + {'input_type': 'image_tensor'}, + {'input_type': 'encoded_image_string_tensor'}, + {'input_type': 'tf_example'}, + ) + def test_export_yields_correct_directory_structure( + self, input_type='image_tensor'): + tmp_dir = self.get_temp_dir() + self._save_checkpoint_from_mock_model(tmp_dir) + with mock.patch.object( + model_builder, 'build', autospec=True) as mock_builder: + mock_builder.return_value = FakeModel() + output_directory = os.path.join(tmp_dir, 'output') + pipeline_config = pipeline_pb2.TrainEvalPipelineConfig() + exporter_lib_v2.export_inference_graph( + input_type=input_type, + pipeline_config=pipeline_config, + trained_checkpoint_dir=tmp_dir, + output_directory=output_directory) + self.assertTrue(os.path.exists(os.path.join( + output_directory, 'saved_model', 'saved_model.pb'))) + self.assertTrue(os.path.exists(os.path.join( + output_directory, 'saved_model', 'variables', 'variables.index'))) + self.assertTrue(os.path.exists(os.path.join( + output_directory, 'saved_model', 'variables', + 'variables.data-00000-of-00001'))) + self.assertTrue(os.path.exists(os.path.join( + output_directory, 'checkpoint', 'ckpt-0.index'))) + self.assertTrue(os.path.exists(os.path.join( + output_directory, 'checkpoint', 'ckpt-0.data-00000-of-00001'))) + self.assertTrue(os.path.exists(os.path.join( + output_directory, 'pipeline.config'))) + + def get_dummy_input(self, input_type): + """Get dummy input for the given input type.""" + + if input_type == 'image_tensor': + return np.zeros(shape=(1, 20, 20, 3), dtype=np.uint8) + if input_type == 'float_image_tensor': + return np.zeros(shape=(1, 20, 20, 3), dtype=np.float32) + elif input_type == 'encoded_image_string_tensor': + image = Image.new('RGB', (20, 20)) + byte_io = io.BytesIO() + 
image.save(byte_io, 'PNG') + return [byte_io.getvalue()] + elif input_type == 'tf_example': + image_tensor = tf.zeros((20, 20, 3), dtype=tf.uint8) + encoded_jpeg = tf.image.encode_jpeg(tf.constant(image_tensor)).numpy() + example = tf.train.Example( + features=tf.train.Features( + feature={ + 'image/encoded': + dataset_util.bytes_feature(encoded_jpeg), + 'image/format': + dataset_util.bytes_feature(six.b('jpeg')), + 'image/source_id': + dataset_util.bytes_feature(six.b('image_id')), + })).SerializeToString() + return [example] + + @parameterized.parameters( + {'input_type': 'image_tensor'}, + {'input_type': 'encoded_image_string_tensor'}, + {'input_type': 'tf_example'}, + {'input_type': 'float_image_tensor'}, + ) + def test_export_saved_model_and_run_inference( + self, input_type='image_tensor'): + tmp_dir = self.get_temp_dir() + self._save_checkpoint_from_mock_model(tmp_dir) + with mock.patch.object( + model_builder, 'build', autospec=True) as mock_builder: + mock_builder.return_value = FakeModel() + output_directory = os.path.join(tmp_dir, 'output') + pipeline_config = pipeline_pb2.TrainEvalPipelineConfig() + exporter_lib_v2.export_inference_graph( + input_type=input_type, + pipeline_config=pipeline_config, + trained_checkpoint_dir=tmp_dir, + output_directory=output_directory) + + saved_model_path = os.path.join(output_directory, 'saved_model') + detect_fn = tf.saved_model.load(saved_model_path) + image = self.get_dummy_input(input_type) + detections = detect_fn(image) + + detection_fields = fields.DetectionResultFields + self.assertAllClose(detections[detection_fields.detection_boxes], + [[[0.0, 0.0, 0.5, 0.5], + [0.5, 0.5, 0.8, 0.8]], + [[0.5, 0.5, 1.0, 1.0], + [0.0, 0.0, 0.0, 0.0]]]) + self.assertAllClose(detections[detection_fields.detection_scores], + [[0.7, 0.6], [0.9, 0.0]]) + self.assertAllClose(detections[detection_fields.detection_classes], + [[1, 2], [2, 1]]) + self.assertAllClose(detections[detection_fields.num_detections], [2, 1]) + + def test_export_checkpoint_and_run_inference_with_image(self): + tmp_dir = self.get_temp_dir() + self._save_checkpoint_from_mock_model(tmp_dir, conv_weight_scalar=2.0) + with mock.patch.object( + model_builder, 'build', autospec=True) as mock_builder: + mock_builder.return_value = FakeModel() + output_directory = os.path.join(tmp_dir, 'output') + pipeline_config = pipeline_pb2.TrainEvalPipelineConfig() + exporter_lib_v2.export_inference_graph( + input_type='image_tensor', + pipeline_config=pipeline_config, + trained_checkpoint_dir=tmp_dir, + output_directory=output_directory) + + mock_model = FakeModel() + ckpt = tf.compat.v2.train.Checkpoint( + model=mock_model) + checkpoint_dir = os.path.join(tmp_dir, 'output', 'checkpoint') + manager = tf.compat.v2.train.CheckpointManager( + ckpt, checkpoint_dir, max_to_keep=7) + ckpt.restore(manager.latest_checkpoint).expect_partial() + + fake_image = tf.ones(shape=[1, 5, 5, 3], dtype=tf.float32) + preprocessed_inputs, true_image_shapes = mock_model.preprocess(fake_image) + predictions = mock_model.predict(preprocessed_inputs, true_image_shapes) + detections = mock_model.postprocess(predictions, true_image_shapes) + + # 150 = conv_weight_scalar * height * width * channels = 2 * 5 * 5 * 3. 
+ self.assertAllClose(detections['detection_scores'], + [[150 + 0.7, 150 + 0.6], [150 + 0.9, 150 + 0.0]]) + + +if __name__ == '__main__': + tf.enable_v2_behavior() + tf.test.main() diff --git a/research/object_detection/exporter_lib_v2.py b/research/object_detection/exporter_lib_v2.py new file mode 100644 index 000000000..a7ecb45ad --- /dev/null +++ b/research/object_detection/exporter_lib_v2.py @@ -0,0 +1,182 @@ +# Lint as: python2, python3 +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +"""Functions to export object detection inference graph.""" +import os +import tensorflow.compat.v2 as tf +from object_detection.builders import model_builder +from object_detection.core import standard_fields as fields +from object_detection.data_decoders import tf_example_decoder +from object_detection.utils import config_util + + +def _decode_image(encoded_image_string_tensor): + image_tensor = tf.image.decode_image(encoded_image_string_tensor, + channels=3) + image_tensor.set_shape((None, None, 3)) + return image_tensor + + +def _decode_tf_example(tf_example_string_tensor): + tensor_dict = tf_example_decoder.TfExampleDecoder().decode( + tf_example_string_tensor) + image_tensor = tensor_dict[fields.InputDataFields.image] + return image_tensor + + +class DetectionInferenceModule(tf.Module): + """Detection Inference Module.""" + + def __init__(self, detection_model): + """Initializes a module for detection. + + Args: + detection_model: The detection model to use for inference. + """ + self._model = detection_model + + def _run_inference_on_images(self, image): + """Cast image to float and run inference. + + Args: + image: uint8 Tensor of shape [1, None, None, 3] + Returns: + Tensor dictionary holding detections. 
+    """
+    label_id_offset = 1
+
+    image = tf.cast(image, tf.float32)
+    image, shapes = self._model.preprocess(image)
+    prediction_dict = self._model.predict(image, shapes)
+    detections = self._model.postprocess(prediction_dict, shapes)
+    classes_field = fields.DetectionResultFields.detection_classes
+    detections[classes_field] = (
+        tf.cast(detections[classes_field], tf.float32) + label_id_offset)
+
+    for key, val in detections.items():
+      detections[key] = tf.cast(val, tf.float32)
+
+    return detections
+
+
+class DetectionFromImageModule(DetectionInferenceModule):
+  """Detection Inference Module for image inputs."""
+
+  @tf.function(
+      input_signature=[
+          tf.TensorSpec(shape=[1, None, None, 3], dtype=tf.uint8)])
+  def __call__(self, input_tensor):
+    return self._run_inference_on_images(input_tensor)
+
+
+class DetectionFromFloatImageModule(DetectionInferenceModule):
+  """Detection Inference Module for float image inputs."""
+
+  @tf.function(
+      input_signature=[
+          tf.TensorSpec(shape=[1, None, None, 3], dtype=tf.float32)])
+  def __call__(self, input_tensor):
+    return self._run_inference_on_images(input_tensor)
+
+
+class DetectionFromEncodedImageModule(DetectionInferenceModule):
+  """Detection Inference Module for encoded image string inputs."""
+
+  @tf.function(input_signature=[tf.TensorSpec(shape=[1], dtype=tf.string)])
+  def __call__(self, input_tensor):
+    with tf.device('cpu:0'):
+      image = tf.map_fn(
+          _decode_image,
+          elems=input_tensor,
+          dtype=tf.uint8,
+          parallel_iterations=32,
+          back_prop=False)
+    return self._run_inference_on_images(image)
+
+
+class DetectionFromTFExampleModule(DetectionInferenceModule):
+  """Detection Inference Module for TF.Example inputs."""
+
+  @tf.function(input_signature=[tf.TensorSpec(shape=[1], dtype=tf.string)])
+  def __call__(self, input_tensor):
+    with tf.device('cpu:0'):
+      image = tf.map_fn(
+          _decode_tf_example,
+          elems=input_tensor,
+          dtype=tf.uint8,
+          parallel_iterations=32,
+          back_prop=False)
+    return self._run_inference_on_images(image)
+
+DETECTION_MODULE_MAP = {
+    'image_tensor': DetectionFromImageModule,
+    'encoded_image_string_tensor':
+        DetectionFromEncodedImageModule,
+    'tf_example': DetectionFromTFExampleModule,
+    'float_image_tensor': DetectionFromFloatImageModule
+}
+
+
+def export_inference_graph(input_type,
+                           pipeline_config,
+                           trained_checkpoint_dir,
+                           output_directory):
+  """Exports inference graph for the model specified in the pipeline config.
+
+  This function creates `output_directory` if it does not already exist,
+  which will hold a copy of the pipeline config with filename `pipeline.config`,
+  and two subdirectories named `checkpoint` and `saved_model`
+  (containing the exported checkpoint and SavedModel respectively).
+
+  Args:
+    input_type: Type of input for the graph. Can be one of ['image_tensor',
+      'encoded_image_string_tensor', 'tf_example', 'float_image_tensor'].
+    pipeline_config: pipeline_pb2.TrainEvalPipelineConfig proto.
+    trained_checkpoint_dir: Path to the trained checkpoint directory.
+    output_directory: Path to write outputs.
+  Raises:
+    ValueError: if input_type is invalid.
+  """
+  output_checkpoint_directory = os.path.join(output_directory, 'checkpoint')
+  output_saved_model_directory = os.path.join(output_directory, 'saved_model')
+
+  detection_model = model_builder.build(pipeline_config.model,
+                                        is_training=False)
+
+  ckpt = tf.train.Checkpoint(
+      model=detection_model)
+  manager = tf.train.CheckpointManager(
+      ckpt, trained_checkpoint_dir, max_to_keep=1)
+  status = ckpt.restore(manager.latest_checkpoint).expect_partial()
+
+  if input_type not in DETECTION_MODULE_MAP:
+    raise ValueError('Unrecognized `input_type`')
+  detection_module = DETECTION_MODULE_MAP[input_type](detection_model)
+  # Getting the concrete function traces the graph and forces variables to
+  # be constructed --- only after this can we save the checkpoint and
+  # saved model.
+  concrete_function = detection_module.__call__.get_concrete_function()
+  status.assert_existing_objects_matched()
+
+  exported_checkpoint_manager = tf.train.CheckpointManager(
+      ckpt, output_checkpoint_directory, max_to_keep=1)
+  exported_checkpoint_manager.save(checkpoint_number=0)
+
+  tf.saved_model.save(detection_module,
+                      output_saved_model_directory,
+                      signatures=concrete_function)
+
+  config_util.save_pipeline_config(pipeline_config, output_directory)
diff --git a/research/object_detection/exporter_main_v2.py b/research/object_detection/exporter_main_v2.py
new file mode 100644
index 000000000..a2ba84560
--- /dev/null
+++ b/research/object_detection/exporter_main_v2.py
@@ -0,0 +1,126 @@
+# Lint as: python2, python3
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+r"""Tool to export an object detection model for inference.
+
+Prepares an object detection tensorflow graph for inference using model
+configuration and a trained checkpoint. Outputs associated checkpoint files,
+a SavedModel, and a copy of the model config.
+
+The inference graph contains one of four input nodes depending on the user
+specified option.
+  * `image_tensor`: Accepts a uint8 4-D tensor of shape [1, None, None, 3]
+  * `float_image_tensor`: Accepts a float32 4-D tensor of shape
+    [1, None, None, 3]
+  * `encoded_image_string_tensor`: Accepts a 1-D string tensor of shape [None]
+    containing encoded PNG or JPEG images. Image resolutions are expected to be
+    the same if more than 1 image is provided.
+  * `tf_example`: Accepts a 1-D string tensor of shape [None] containing
+    serialized TFExample protos. Image resolutions are expected to be the same
+    if more than 1 image is provided.
+
+and the following output nodes returned by the model.postprocess(..):
+  * `num_detections`: Outputs float32 tensors of the form [batch]
+    that specifies the number of valid boxes per image in the batch.
+  * `detection_boxes`: Outputs float32 tensors of the form
+    [batch, num_boxes, 4] containing detected boxes.
+ * `detection_scores`: Outputs float32 tensors of the form + [batch, num_boxes] containing class scores for the detections. + * `detection_classes`: Outputs float32 tensors of the form + [batch, num_boxes] containing classes for the detections. + + +Example Usage: +-------------- +python exporter_main_v2.py \ + --input_type image_tensor \ + --pipeline_config_path path/to/ssd_inception_v2.config \ + --trained_checkpoint_dir path/to/checkpoint \ + --output_directory path/to/exported_model_directory + +The expected output would be in the directory +path/to/exported_model_directory (which is created if it does not exist) +holding two subdirectories (corresponding to checkpoint and SavedModel, +respectively) and a copy of the pipeline config. + +Config overrides (see the `config_override` flag) are text protobufs +(also of type pipeline_pb2.TrainEvalPipelineConfig) which are used to override +certain fields in the provided pipeline_config_path. These are useful for +making small changes to the inference graph that differ from the training or +eval config. + +Example Usage (in which we change the second stage post-processing score +threshold to be 0.5): + +python exporter_main_v2.py \ + --input_type image_tensor \ + --pipeline_config_path path/to/ssd_inception_v2.config \ + --trained_checkpoint_dir path/to/checkpoint \ + --output_directory path/to/exported_model_directory \ + --config_override " \ + model{ \ + faster_rcnn { \ + second_stage_post_processing { \ + batch_non_max_suppression { \ + score_threshold: 0.5 \ + } \ + } \ + } \ + }" +""" +from absl import app +from absl import flags + +import tensorflow.compat.v2 as tf +from google.protobuf import text_format +from object_detection import exporter_lib_v2 +from object_detection.protos import pipeline_pb2 + +tf.enable_v2_behavior() + + +FLAGS = flags.FLAGS + +flags.DEFINE_string('input_type', 'image_tensor', 'Type of input node. 
Can be ' + 'one of [`image_tensor`, `encoded_image_string_tensor`, ' + '`tf_example`, `float_image_tensor`]') +flags.DEFINE_string('pipeline_config_path', None, + 'Path to a pipeline_pb2.TrainEvalPipelineConfig config ' + 'file.') +flags.DEFINE_string('trained_checkpoint_dir', None, + 'Path to trained checkpoint directory') +flags.DEFINE_string('output_directory', None, 'Path to write outputs.') +flags.DEFINE_string('config_override', '', + 'pipeline_pb2.TrainEvalPipelineConfig ' + 'text proto to override pipeline_config_path.') + +flags.mark_flag_as_required('pipeline_config_path') +flags.mark_flag_as_required('trained_checkpoint_dir') +flags.mark_flag_as_required('output_directory') + + +def main(_): + pipeline_config = pipeline_pb2.TrainEvalPipelineConfig() + with tf.io.gfile.GFile(FLAGS.pipeline_config_path, 'r') as f: + text_format.Merge(f.read(), pipeline_config) + text_format.Merge(FLAGS.config_override, pipeline_config) + exporter_lib_v2.export_inference_graph( + FLAGS.input_type, pipeline_config, FLAGS.trained_checkpoint_dir, + FLAGS.output_directory) + + +if __name__ == '__main__': + app.run(main) diff --git a/research/object_detection/exporter_test.py b/research/object_detection/exporter_tf1_test.py similarity index 99% rename from research/object_detection/exporter_test.py rename to research/object_detection/exporter_tf1_test.py index babe41d1e..40bdb966f 100644 --- a/research/object_detection/exporter_test.py +++ b/research/object_detection/exporter_tf1_test.py @@ -19,6 +19,7 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function import os +import unittest import numpy as np import six import tensorflow.compat.v1 as tf @@ -33,12 +34,13 @@ from object_detection.core import model from object_detection.protos import graph_rewriter_pb2 from object_detection.protos import pipeline_pb2 from object_detection.utils import ops +from object_detection.utils import tf_version from object_detection.utils import variables_helper if six.PY2: import mock # pylint: disable=g-import-not-at-top else: - from unittest import mock # pylint: disable=g-import-not-at-top + mock = unittest.mock # pylint: disable=g-import-not-at-top, g-importing-member # pylint: disable=g-import-not-at-top try: @@ -113,6 +115,7 @@ class FakeModel(model.DetectionModel): pass +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class ExportInferenceGraphTest(tf.test.TestCase): def _save_checkpoint_from_mock_model(self, diff --git a/research/object_detection/g3doc/context_rcnn.md b/research/object_detection/g3doc/context_rcnn.md new file mode 100644 index 000000000..d322b3fd8 --- /dev/null +++ b/research/object_detection/g3doc/context_rcnn.md @@ -0,0 +1,173 @@ +# Context R-CNN + +Context R-CNN is an object detection model that uses contextual features to +improve object detection. See https://arxiv.org/abs/1912.03538 for more details. 
+ +## Table of Contents + +* [Preparing Context Data for Context R-CNN](#preparing-context-data-for-context-r-cnn) + + [Generating TfRecords from a set of images and a COCO-CameraTraps style + JSON](#generating-tfrecords-from-a-set-of-images-and-a-coco-cameratraps-style-json) + + [Generating weakly-supervised bounding box labels for image-labeled data](#generating-weakly-supervised-bounding-box-labels-for-image-labeled-data) + + [Generating and saving contextual features for each image](#generating-and-saving-contextual-features-for-each-image) + + [Building up contextual memory banks and storing them for each context + group](#building-up-contextual-memory-banks-and-storing-them-for-each-context-group) +- [Training a Context R-CNN Model](#training-a-context-r-cnn-model) +- [Exporting a Context R-CNN Model](#exporting-a-context-r-cnn-model) + +## Preparing Context Data for Context R-CNN + +In this section, we will walk through the process of generating TfRecords with +contextual features. We focus on building context from object-centric features +generated with a pre-trained Faster R-CNN model, but you can adapt the provided +code to use alternative feature extractors. + +### Generating TfRecords from a set of images and a COCO-CameraTraps style JSON + +If your data is already stored in TfRecords, you can skip this first step. + +We assume a COCO-CameraTraps json format, as described on +[LILA.science](https://github.com/microsoft/CameraTraps/blob/master/data_management/README.md). + +COCO-CameraTraps is a format that adds static-camera-specific fields, such as a +location ID and datetime, to the well-established COCO format. To generate +appropriate context later on, be sure you have specified each contextual group +with a different location ID, which in the static camera case would be the ID of +the camera, as well as the datetime each photo was taken. We assume that empty +images will be labeled 'empty' with class id 0. + +To generate TfRecords from your database and local image folder, run + +``` +python object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_main.py \ + --alsologtostderr \ + --output_tfrecord_prefix="/path/to/output/tfrecord/location/prefix" \ + --image_directory="/path/to/image/folder/" \ + --input_annotations_file="path/to/annotations.json" +``` + +### Generating weakly-supervised bounding box labels for image-labeled data + +If all your data already has bounding box labels you can skip this step. + +Many camera trap datasets do not have bounding box labels, or only have bounding +box labels for some of the data. We have provided code to add bounding boxes +from a pretrained model (such as the +[Microsoft AI for Earth MegaDetector](https://github.com/microsoft/CameraTraps/blob/master/megadetector.md)) +and match the boxes to the image-level class label. 
+ +To export your pretrained detection model, run + +``` +python object_detection/export_inference_graph.py \ + --alsologtostderr \ + --input_type tf_example \ + --pipeline_config_path path/to/faster_rcnn_model.config \ + --trained_checkpoint_prefix path/to/model.ckpt \ + --output_directory path/to/exported_model_directory +``` + +To add bounding boxes to your dataset using the above model, run + +``` +python object_detection/dataset_tools/context_rcnn/generate_detection_data.py \ + --alsologtostderr \ + --input_tfrecord path/to/input_tfrecord@X \ + --output_tfrecord path/to/output_tfrecord@X \ + --model_dir path/to/exported_model_directory/saved_model +``` + +If an image already has bounding box labels, those labels are left unchanged. If +an image is labeled 'empty' (class ID 0), we will not generate boxes for that +image. + +### Generating and saving contextual features for each image + +We next extract and store features for each image from a pretrained model. This +model can be the same model as above, or be a class-specific detection model +trained on data from your classes of interest. + +To export your pretrained detection model, run + +``` +python object_detection/export_inference_graph.py \ + --alsologtostderr \ + --input_type tf_example \ + --pipeline_config_path path/to/pipeline.config \ + --trained_checkpoint_prefix path/to/model.ckpt \ + --output_directory path/to/exported_model_directory \ + --additional_output_tensor_names detection_features +``` + +To generate and save contextual features for your data, run + +``` +python object_detection/dataset_tools/context_rcnn/generate_embedding_data.py \ + --alsologtostderr \ + --embedding_input_tfrecord path/to/input_tfrecords* \ + --embedding_output_tfrecord path/to/output_tfrecords \ + --embedding_model_dir path/to/exported_model_directory/saved_model +``` + +### Building up contextual memory banks and storing them for each context group + +To build the context features into memory banks, run + +``` +python object_detection/dataset_tools/context_rcnn/add_context_to_examples.py \ + --input_tfrecord path/to/input_tfrecords* \ + --output_tfrecord path/to/output_tfrecords \ + --sequence_key image/location \ + --time_horizon month +``` + +For all options, see add_context_to_examples.py. By default, this code builds +TfSequenceExamples, which are more data efficient (this allows you to store the +context features once for each context group, as opposed to once per image). If +you would like to export TfExamples instead, set flag `--output_type +tf_example`. + +If you use TfSequenceExamples, you must be sure to set `input_type: +TF_SEQUENCE_EXAMPLE` within your Context R-CNN configs for both +train_input_reader and test_input_reader. See +`object_detection/test_data/context_rcnn_camera_trap.config` +for an example. + +## Training a Context R-CNN Model + +To train a Context R-CNN model, you must first set up your config file. See +`test_data/context_rcnn_camera_trap.config` for an example. The important +difference between this config and a Faster R-CNN config is the inclusion of a +`context_config` within the model, which defines the necessary Context R-CNN +parameters. + +``` +context_config { + max_num_context_features: 2000 + context_feature_length: 2057 + } +``` + +Once your config file has been updated with your local paths, you can follow +along with documentation for running [locally](running_locally.md), or +[on the cloud](running_on_cloud.md). 
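+
+Before kicking off a training job, it can also help to sanity-check the memory
+banks built in the previous section. The snippet below is only a sketch and is
+not part of the provided tools: it assumes the default TfSequenceExample output
+of add_context_to_examples.py and uses a placeholder shard path. It reads the
+first record and prints its keys, so you can confirm the context features are
+present and consistent with the `context_config` values above.
+
+```
+import tensorflow.compat.v1 as tf
+
+# Placeholder path; point this at one shard of your output TfRecords.
+record_path = '/path/to/output_tfrecords-00000-of-00100'
+
+# Read a single serialized record and parse it as a SequenceExample.
+serialized = next(tf.python_io.tf_record_iterator(record_path))
+example = tf.train.SequenceExample()
+example.ParseFromString(serialized)
+
+# The context features for a whole context group are stored once per record
+# rather than being duplicated per image; list what is available.
+print('context keys:', sorted(example.context.feature.keys()))
+print('feature_list keys:', sorted(example.feature_lists.feature_list.keys()))
+```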
+ +## Exporting a Context R-CNN Model + +Since Context R-CNN takes context features as well as images as input, we have +to explicitly define the other inputs ("side_inputs") to the model when +exporting, as below. This example is shown with default context feature shapes. + +``` +python export_inference_graph.py \ + --input_type image_tensor \ + --input_shape 1,-1,-1,3 \ + --pipeline_config_path /path/to/context_rcnn_model/pipeline.config \ + --trained_checkpoint_prefix /path/to/context_rcnn_model/model.ckpt \ + --output_directory /path/to/output_directory \ + --use_side_inputs True \ + --side_input_shapes 1,2000,2057/1 \ + --side_input_names context_features,valid_context_size \ + --side_input_types float,int + +``` diff --git a/research/object_detection/g3doc/detection_model_zoo.md b/research/object_detection/g3doc/detection_model_zoo.md index b13fe6a31..cb515b813 100644 --- a/research/object_detection/g3doc/detection_model_zoo.md +++ b/research/object_detection/g3doc/detection_model_zoo.md @@ -1,32 +1,34 @@ # Tensorflow detection model zoo -We provide a collection of detection models pre-trained on the [COCO -dataset](http://cocodataset.org), the [Kitti dataset](http://www.cvlibs.net/datasets/kitti/), -the +We provide a collection of detection models pre-trained on the +[COCO dataset](http://cocodataset.org), the +[Kitti dataset](http://www.cvlibs.net/datasets/kitti/), the [Open Images dataset](https://storage.googleapis.com/openimages/web/index.html), -the [AVA v2.1 dataset](https://research.google.com/ava/) and the -[iNaturalist Species Detection Dataset](https://github.com/visipedia/inat_comp/blob/master/2017/README.md#bounding-boxes). +the [AVA v2.1 dataset](https://research.google.com/ava/) the +[iNaturalist Species Detection Dataset](https://github.com/visipedia/inat_comp/blob/master/2017/README.md#bounding-boxes) +and the +[Snapshot Serengeti Dataset](http://lila.science/datasets/snapshot-serengeti). These models can be useful for out-of-the-box inference if you are interested in categories already in those datasets. They are also useful for initializing your models when training on novel datasets. In the table below, we list each such pre-trained model including: -* a model name that corresponds to a config file that was used to train this - model in the `samples/configs` directory, -* a download link to a tar.gz file containing the pre-trained model, -* model speed --- we report running time in ms per 600x600 image (including all - pre and post-processing), but please be - aware that these timings depend highly on one's specific hardware - configuration (these timings were performed using an Nvidia - GeForce GTX TITAN X card) and should be treated more as relative timings in - many cases. Also note that desktop GPU timing does not always reflect mobile - run time. For example Mobilenet V2 is faster on mobile devices than Mobilenet - V1, but is slightly slower on desktop GPU. -* detector performance on subset of the COCO validation set or Open Images test split as measured by the dataset-specific mAP measure. - Here, higher is better, and we only report bounding box mAP rounded to the - nearest integer. 
-* Output types (`Boxes`, and `Masks` if applicable )
+*   a model name that corresponds to a config file that was used to train this
+    model in the `samples/configs` directory,
+*   a download link to a tar.gz file containing the pre-trained model,
+*   model speed --- we report running time in ms per 600x600 image (including
+    all pre and post-processing), but please be aware that these timings depend
+    highly on one's specific hardware configuration (these timings were
+    performed using an Nvidia GeForce GTX TITAN X card) and should be treated
+    more as relative timings in many cases. Also note that desktop GPU timing
+    does not always reflect mobile run time. For example Mobilenet V2 is faster
+    on mobile devices than Mobilenet V1, but is slightly slower on desktop GPU.
+*   detector performance on a subset of the COCO validation set, Open Images
+    test split, iNaturalist test split, or Snapshot Serengeti LILA.science test
+    split, as measured by the dataset-specific mAP measure. Here, higher is
+    better, and we only report bounding box mAP rounded to the nearest integer.
+*   Output types (`Boxes`, and `Masks` if applicable)
 
 You can un-tar each tar.gz file via, e.g.,:
 
@@ -53,118 +55,133 @@ Inside the un-tar'ed directory, you will find:
 
 Some remarks on frozen inference graphs:
 
-* If you try to evaluate the frozen graph, you may find performance numbers for
-  some of the models to be slightly lower than what we report in the below
-  tables. This is because we discard detections with scores below a
-  threshold (typically 0.3) when creating the frozen graph. This corresponds
-  effectively to picking a point on the precision recall curve of
-  a detector (and discarding the part past that point), which negatively impacts
-  standard mAP metrics.
-* Our frozen inference graphs are generated using the
-  [v1.12.0](https://github.com/tensorflow/tensorflow/tree/v1.12.0)
-  release version of Tensorflow and we do not guarantee that these will work
-  with other versions; this being said, each frozen inference graph can be
-  regenerated using your current version of Tensorflow by re-running the
-  [exporter](https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/exporting_models.md),
-  pointing it at the model directory as well as the corresponding config file in
-  [samples/configs](https://github.com/tensorflow/models/tree/master/research/object_detection/samples/configs).
-
+*   If you try to evaluate the frozen graph, you may find performance numbers
+    for some of the models to be slightly lower than what we report in the below
+    tables. This is because we discard detections with scores below a threshold
+    (typically 0.3) when creating the frozen graph. This corresponds effectively
+    to picking a point on the precision recall curve of a detector (and
+    discarding the part past that point), which negatively impacts standard mAP
+    metrics.
+*   Our frozen inference graphs are generated using the
+    [v1.12.0](https://github.com/tensorflow/tensorflow/tree/v1.12.0) release
+    version of Tensorflow and we do not guarantee that these will work with
+    other versions; this being said, each frozen inference graph can be
+    regenerated using your current version of Tensorflow by re-running the
+    [exporter](https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/exporting_models.md),
+    pointing it at the model directory as well as the corresponding config file
+    in
+    [samples/configs](https://github.com/tensorflow/models/tree/master/research/object_detection/samples/configs).
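+
+As a rough illustration of the out-of-the-box inference mentioned above, the
+sketch below loads a downloaded `frozen_inference_graph.pb` and runs it on a
+dummy image. It is only a sketch, not part of this repository: the tensor names
+follow the Object Detection API exporter conventions and may need adjusting for
+a particular model, and the dummy input should be replaced with a real
+[1, height, width, 3] image batch.
+
+```
+import numpy as np
+import tensorflow.compat.v1 as tf
+
+# Load the serialized frozen graph shipped inside the un-tar'ed directory.
+graph_def = tf.GraphDef()
+with tf.io.gfile.GFile('frozen_inference_graph.pb', 'rb') as f:
+  graph_def.ParseFromString(f.read())
+
+with tf.Graph().as_default() as graph:
+  tf.import_graph_def(graph_def, name='')
+
+# Dummy uint8 batch; replace with a real image.
+image = np.zeros((1, 600, 600, 3), dtype=np.uint8)
+
+with tf.Session(graph=graph) as sess:
+  boxes, scores, classes, num = sess.run(
+      ['detection_boxes:0', 'detection_scores:0',
+       'detection_classes:0', 'num_detections:0'],
+      feed_dict={'image_tensor:0': image})
+print('valid detections in first image:', int(num[0]))
+```
+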
## COCO-trained models -| Model name | Speed (ms) | COCO mAP[^1] | Outputs | -| ------------ | :--------------: | :--------------: | :-------------: | -| [ssd_mobilenet_v1_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_coco_2018_01_28.tar.gz) | 30 | 21 | Boxes | -| [ssd_mobilenet_v1_0.75_depth_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_0.75_depth_300x300_coco14_sync_2018_07_03.tar.gz) | 26 | 18 | Boxes | -| [ssd_mobilenet_v1_quantized_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_quantized_300x300_coco14_sync_2018_07_18.tar.gz) | 29 | 18 | Boxes | -| [ssd_mobilenet_v1_0.75_depth_quantized_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_0.75_depth_quantized_300x300_coco14_sync_2018_07_18.tar.gz) | 29 | 16 | Boxes | -| [ssd_mobilenet_v1_ppn_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_ppn_shared_box_predictor_300x300_coco14_sync_2018_07_03.tar.gz) | 26 | 20 | Boxes | -| [ssd_mobilenet_v1_fpn_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_fpn_shared_box_predictor_640x640_coco14_sync_2018_07_03.tar.gz) | 56 | 32 | Boxes | -| [ssd_resnet_50_fpn_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_resnet50_v1_fpn_shared_box_predictor_640x640_coco14_sync_2018_07_03.tar.gz) | 76 | 35 | Boxes | -| [ssd_mobilenet_v2_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v2_coco_2018_03_29.tar.gz) | 31 | 22 | Boxes | -| [ssd_mobilenet_v2_quantized_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v2_quantized_300x300_coco_2019_01_03.tar.gz) | 29 | 22 | Boxes | -| [ssdlite_mobilenet_v2_coco](http://download.tensorflow.org/models/object_detection/ssdlite_mobilenet_v2_coco_2018_05_09.tar.gz) | 27 | 22 | Boxes | -| [ssd_inception_v2_coco](http://download.tensorflow.org/models/object_detection/ssd_inception_v2_coco_2018_01_28.tar.gz) | 42 | 24 | Boxes | -| [faster_rcnn_inception_v2_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_v2_coco_2018_01_28.tar.gz) | 58 | 28 | Boxes | -| [faster_rcnn_resnet50_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet50_coco_2018_01_28.tar.gz) | 89 | 30 | Boxes | -| [faster_rcnn_resnet50_lowproposals_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet50_lowproposals_coco_2018_01_28.tar.gz) | 64 | | Boxes | -| [rfcn_resnet101_coco](http://download.tensorflow.org/models/object_detection/rfcn_resnet101_coco_2018_01_28.tar.gz) | 92 | 30 | Boxes | -| [faster_rcnn_resnet101_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_coco_2018_01_28.tar.gz) | 106 | 32 | Boxes | -| [faster_rcnn_resnet101_lowproposals_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_lowproposals_coco_2018_01_28.tar.gz) | 82 | | Boxes | -| [faster_rcnn_inception_resnet_v2_atrous_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_coco_2018_01_28.tar.gz) | 620 | 37 | Boxes | -| [faster_rcnn_inception_resnet_v2_atrous_lowproposals_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_lowproposals_coco_2018_01_28.tar.gz) | 241 | | Boxes | -| [faster_rcnn_nas](http://download.tensorflow.org/models/object_detection/faster_rcnn_nas_coco_2018_01_28.tar.gz) | 1833 | 43 | Boxes | -| 
[faster_rcnn_nas_lowproposals_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_nas_lowproposals_coco_2018_01_28.tar.gz) | 540 | | Boxes | -| [mask_rcnn_inception_resnet_v2_atrous_coco](http://download.tensorflow.org/models/object_detection/mask_rcnn_inception_resnet_v2_atrous_coco_2018_01_28.tar.gz) | 771 | 36 | Masks | -| [mask_rcnn_inception_v2_coco](http://download.tensorflow.org/models/object_detection/mask_rcnn_inception_v2_coco_2018_01_28.tar.gz) | 79 | 25 | Masks | -| [mask_rcnn_resnet101_atrous_coco](http://download.tensorflow.org/models/object_detection/mask_rcnn_resnet101_atrous_coco_2018_01_28.tar.gz) | 470 | 33 | Masks | -| [mask_rcnn_resnet50_atrous_coco](http://download.tensorflow.org/models/object_detection/mask_rcnn_resnet50_atrous_coco_2018_01_28.tar.gz) | 343 | 29 | Masks | - -Note: The asterisk (☆) at the end of model name indicates that this model supports TPU training. - -Note: If you download the tar.gz file of quantized models and un-tar, you will get different set of files - a checkpoint, a config file and tflite frozen graphs (txt/binary). - +Model name | Speed (ms) | COCO mAP[^1] | Outputs +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :--------: | :----------: | :-----: +[ssd_mobilenet_v1_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_coco_2018_01_28.tar.gz) | 30 | 21 | Boxes +[ssd_mobilenet_v1_0.75_depth_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_0.75_depth_300x300_coco14_sync_2018_07_03.tar.gz) | 26 | 18 | Boxes +[ssd_mobilenet_v1_quantized_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_quantized_300x300_coco14_sync_2018_07_18.tar.gz) | 29 | 18 | Boxes +[ssd_mobilenet_v1_0.75_depth_quantized_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_0.75_depth_quantized_300x300_coco14_sync_2018_07_18.tar.gz) | 29 | 16 | Boxes +[ssd_mobilenet_v1_ppn_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_ppn_shared_box_predictor_300x300_coco14_sync_2018_07_03.tar.gz) | 26 | 20 | Boxes +[ssd_mobilenet_v1_fpn_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_fpn_shared_box_predictor_640x640_coco14_sync_2018_07_03.tar.gz) | 56 | 32 | Boxes +[ssd_resnet_50_fpn_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_resnet50_v1_fpn_shared_box_predictor_640x640_coco14_sync_2018_07_03.tar.gz) | 76 | 35 | Boxes +[ssd_mobilenet_v2_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v2_coco_2018_03_29.tar.gz) | 31 | 22 | Boxes +[ssd_mobilenet_v2_quantized_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v2_quantized_300x300_coco_2019_01_03.tar.gz) | 29 | 22 | Boxes +[ssdlite_mobilenet_v2_coco](http://download.tensorflow.org/models/object_detection/ssdlite_mobilenet_v2_coco_2018_05_09.tar.gz) | 27 | 22 | Boxes +[ssd_inception_v2_coco](http://download.tensorflow.org/models/object_detection/ssd_inception_v2_coco_2018_01_28.tar.gz) | 42 | 24 | Boxes +[faster_rcnn_inception_v2_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_v2_coco_2018_01_28.tar.gz) | 58 | 28 | Boxes +[faster_rcnn_resnet50_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet50_coco_2018_01_28.tar.gz) | 89 | 30 | Boxes 
+[faster_rcnn_resnet50_lowproposals_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet50_lowproposals_coco_2018_01_28.tar.gz) | 64 | | Boxes +[rfcn_resnet101_coco](http://download.tensorflow.org/models/object_detection/rfcn_resnet101_coco_2018_01_28.tar.gz) | 92 | 30 | Boxes +[faster_rcnn_resnet101_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_coco_2018_01_28.tar.gz) | 106 | 32 | Boxes +[faster_rcnn_resnet101_lowproposals_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_lowproposals_coco_2018_01_28.tar.gz) | 82 | | Boxes +[faster_rcnn_inception_resnet_v2_atrous_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_coco_2018_01_28.tar.gz) | 620 | 37 | Boxes +[faster_rcnn_inception_resnet_v2_atrous_lowproposals_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_lowproposals_coco_2018_01_28.tar.gz) | 241 | | Boxes +[faster_rcnn_nas](http://download.tensorflow.org/models/object_detection/faster_rcnn_nas_coco_2018_01_28.tar.gz) | 1833 | 43 | Boxes +[faster_rcnn_nas_lowproposals_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_nas_lowproposals_coco_2018_01_28.tar.gz) | 540 | | Boxes +[mask_rcnn_inception_resnet_v2_atrous_coco](http://download.tensorflow.org/models/object_detection/mask_rcnn_inception_resnet_v2_atrous_coco_2018_01_28.tar.gz) | 771 | 36 | Masks +[mask_rcnn_inception_v2_coco](http://download.tensorflow.org/models/object_detection/mask_rcnn_inception_v2_coco_2018_01_28.tar.gz) | 79 | 25 | Masks +[mask_rcnn_resnet101_atrous_coco](http://download.tensorflow.org/models/object_detection/mask_rcnn_resnet101_atrous_coco_2018_01_28.tar.gz) | 470 | 33 | Masks +[mask_rcnn_resnet50_atrous_coco](http://download.tensorflow.org/models/object_detection/mask_rcnn_resnet50_atrous_coco_2018_01_28.tar.gz) | 343 | 29 | Masks + +Note: The asterisk (☆) at the end of model name indicates that this model +supports TPU training. + +Note: If you download the tar.gz file of quantized models and un-tar, you will +get different set of files - a checkpoint, a config file and tflite frozen +graphs (txt/binary). 
### Mobile models Model name | Pixel 1 Latency (ms) | COCO mAP | Outputs ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :------------------: | :------: | :-----: -[ssd_mobiledet_cpu_coco](http://download.tensorflow.org/models/object_detection/ssdlite_mobiledet_cpu_320x320_coco_2020_05_19.tar.gz) | 113 | 24.0 | Boxes -[ssd_mobilenet_v2_mnasfpn_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v2_mnasfpn_shared_box_predictor_320x320_coco_sync_2020_05_18.tar.gz) | 183 | 26.6 | Boxes -[ssd_mobilenet_v3_large_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v3_large_coco_2020_01_14.tar.gz) | 119 | 22.6 | Boxes -[ssd_mobilenet_v3_small_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v3_small_coco_2020_01_14.tar.gz) | 43 | 15.4 | Boxes +[ssd_mobiledet_cpu_coco](http://download.tensorflow.org/models/object_detection/ssdlite_mobiledet_cpu_320x320_coco_2020_05_19.tar.gz) | 113 | 24.0 | Boxes +[ssd_mobilenet_v2_mnasfpn_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v2_mnasfpn_shared_box_predictor_320x320_coco_sync_2020_05_18.tar.gz) | 183 | 26.6 | Boxes +[ssd_mobilenet_v3_large_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v3_large_coco_2020_01_14.tar.gz) | 119 | 22.6 | Boxes +[ssd_mobilenet_v3_small_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v3_small_coco_2020_01_14.tar.gz) | 43 | 15.4 | Boxes ### Pixel4 Edge TPU models -Model name | Pixel 4 Edge TPU Latency (ms) | COCO mAP (fp32/uint8) | Outputs ------------------------------------------------------------------------------------------------------------------------------------ | :------------------: | :------: | :-----: -[ssd_mobiledet_edgetpu_coco](http://download.tensorflow.org/models/object_detection/ssdlite_mobiledet_edgetpu_320x320_coco_2020_05_19.tar.gz) | 6.9 | 25.9/25.6 | Boxes -[ssd_mobilenet_edgetpu_coco](https://storage.cloud.google.com/mobilenet_edgetpu/checkpoints/ssdlite_mobilenet_edgetpu_coco_quant.tar.gz) | 6.6 | -/24.3 | Boxes + +Model name | Pixel 4 Edge TPU Latency (ms) | COCO mAP (fp32/uint8) | Outputs +--------------------------------------------------------------------------------------------------------------------------------------------- | :---------------------------: | :-------------------: | :-----: +[ssd_mobiledet_edgetpu_coco](http://download.tensorflow.org/models/object_detection/ssdlite_mobiledet_edgetpu_320x320_coco_2020_05_19.tar.gz) | 6.9 | 25.9/25.6 | Boxes +[ssd_mobilenet_edgetpu_coco](https://storage.cloud.google.com/mobilenet_edgetpu/checkpoints/ssdlite_mobilenet_edgetpu_coco_quant.tar.gz) | 6.6 | -/24.3 | Boxes ### Pixel4 DSP models -Model name | Pixel 4 DSP Latency (ms) | COCO mAP (fp32/uint8) | Outputs ------------------------------------------------------------------------------------------------------------------------------------ | :------------------: | :------: | :-----: -[ssd_mobiledet_dsp_coco](http://download.tensorflow.org/models/object_detection/ssdlite_mobiledet_dsp_320x320_coco_2020_05_19.tar.gz) | 12.3 | 28.9/28.8 | Boxes + +Model name | Pixel 4 DSP Latency (ms) | COCO mAP (fp32/uint8) | Outputs +------------------------------------------------------------------------------------------------------------------------------------- | :----------------------: | :-------------------: | :-----: 
+[ssd_mobiledet_dsp_coco](http://download.tensorflow.org/models/object_detection/ssdlite_mobiledet_dsp_320x320_coco_2020_05_19.tar.gz) | 12.3 | 28.9/28.8 | Boxes ## Kitti-trained models -Model name | Speed (ms) | Pascal mAP@0.5 | Outputs ------------------------------------------------------------------------------------------------------------------------------------------------------------------ | :---: | :-------------: | :-----: -[faster_rcnn_resnet101_kitti](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_kitti_2018_01_28.tar.gz) | 79 | 87 | Boxes +Model name | Speed (ms) | Pascal mAP@0.5 | Outputs +----------------------------------------------------------------------------------------------------------------------------------- | :--------: | :------------: | :-----: +[faster_rcnn_resnet101_kitti](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_kitti_2018_01_28.tar.gz) | 79 | 87 | Boxes ## Open Images-trained models Model name | Speed (ms) | Open Images mAP@0.5[^2] | Outputs --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :--------: | :---------------------: | :-----: -[faster_rcnn_inception_resnet_v2_atrous_oidv2](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_oid_2018_01_28.tar.gz) | 727 | 37 | Boxes +[faster_rcnn_inception_resnet_v2_atrous_oidv2](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_oid_2018_01_28.tar.gz) | 727 | 37 | Boxes [faster_rcnn_inception_resnet_v2_atrous_lowproposals_oidv2](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_lowproposals_oid_2018_01_28.tar.gz) | 347 | | Boxes [facessd_mobilenet_v2_quantized_open_image_v4](http://download.tensorflow.org/models/object_detection/facessd_mobilenet_v2_quantized_320x320_open_image_v4.tar.gz) [^3] | 20 | 73 (faces) | Boxes -Model name | Speed (ms) | Open Images mAP@0.5[^4] | Outputs ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :--------: | :---------------------: | :-----: -[faster_rcnn_inception_resnet_v2_atrous_oidv4](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_oid_v4_2018_12_12.tar.gz) | 425 | 54 | Boxes -[ssd_mobilenetv2_oidv4](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v2_oid_v4_2018_12_12.tar.gz) | 89 | 36 | Boxes -[ssd_resnet_101_fpn_oidv4](http://download.tensorflow.org/models/object_detection/ssd_resnet101_v1_fpn_shared_box_predictor_oid_512x512_sync_2019_01_20.tar.gz) | 237 | 38 | Boxes -## iNaturalist Species-trained models +Model name | Speed (ms) | Open Images mAP@0.5[^4] | Outputs +---------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :--------: | :---------------------: | :-----: +[faster_rcnn_inception_resnet_v2_atrous_oidv4](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_oid_v4_2018_12_12.tar.gz) | 425 | 54 | Boxes +[ssd_mobilenetv2_oidv4](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v2_oid_v4_2018_12_12.tar.gz) | 89 | 36 | Boxes 
+[ssd_resnet_101_fpn_oidv4](http://download.tensorflow.org/models/object_detection/ssd_resnet101_v1_fpn_shared_box_predictor_oid_512x512_sync_2019_01_20.tar.gz) | 237 | 38 | Boxes -Model name | Speed (ms) | Pascal mAP@0.5 | Outputs ------------------------------------------------------------------------------------------------------------------------------------------------------------------ | :---: | :-------------: | :-----: -[faster_rcnn_resnet101_fgvc](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_fgvc_2018_07_19.tar.gz) | 395 | 58 | Boxes -[faster_rcnn_resnet50_fgvc](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet50_fgvc_2018_07_19.tar.gz) | 366 | 55 | Boxes +## iNaturalist Species-trained models +Model name | Speed (ms) | Pascal mAP@0.5 | Outputs +--------------------------------------------------------------------------------------------------------------------------------- | :--------: | :------------: | :-----: +[faster_rcnn_resnet101_fgvc](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_fgvc_2018_07_19.tar.gz) | 395 | 58 | Boxes +[faster_rcnn_resnet50_fgvc](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet50_fgvc_2018_07_19.tar.gz) | 366 | 55 | Boxes ## AVA v2.1 trained models -Model name | Speed (ms) | Pascal mAP@0.5 | Outputs ------------------------------------------------------------------------------------------------------------------------------------------------------------------ | :---: | :-------------: | :-----: -[faster_rcnn_resnet101_ava_v2.1](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_ava_v2.1_2018_04_30.tar.gz) | 93 | 11 | Boxes - - -[^1]: See [MSCOCO evaluation protocol](http://cocodataset.org/#detections-eval). The COCO mAP numbers here are evaluated on COCO 14 minival set (note that our split is different from COCO 17 Val). A full list of image ids used in our split could be fould [here](https://github.com/tensorflow/models/blob/master/research/object_detection/data/mscoco_minival_ids.txt). - - -[^2]: This is PASCAL mAP with a slightly different way of true positives computation: see [Open Images evaluation protocols](evaluation_protocols.md), oid_V2_detection_metrics. - -[^3]: Non-face boxes are dropped during training and non-face groundtruth boxes are ignored when evaluating. - -[^4]: This is Open Images Challenge metric: see [Open Images evaluation protocols](evaluation_protocols.md), oid_challenge_detection_metrics. 
-
+Model name | Speed (ms) | Pascal mAP@0.5 | Outputs
+----------------------------------------------------------------------------------------------------------------------------------------- | :--------: | :------------: | :-----:
+[faster_rcnn_resnet101_ava_v2.1](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_ava_v2.1_2018_04_30.tar.gz) | 93 | 11 | Boxes
+
+## Snapshot Serengeti Camera Trap trained models
+
+Model name | COCO mAP@0.5 | Outputs
+--------------------------------------------------------------------------------------------------------------------------------------------------------------- | :----------: | :-----:
+[faster_rcnn_resnet101_snapshot_serengeti](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_snapshot_serengeti_2020_06_10.tar.gz) | 38 | Boxes
+[context_rcnn_resnet101_snapshot_serengeti](http://download.tensorflow.org/models/object_detection/context_rcnn_resnet101_snapshot_serengeti_2020_06_10.tar.gz) | 56 | Boxes
+
+[^1]: See [MSCOCO evaluation protocol](http://cocodataset.org/#detections-eval).
+    The COCO mAP numbers here are evaluated on COCO 14 minival set (note that
+    our split is different from COCO 17 Val). A full list of image ids used in
+    our split could be found
+    [here](https://github.com/tensorflow/models/blob/master/research/object_detection/data/mscoco_minival_ids.txt).
+[^2]: This is PASCAL mAP with a slightly different way of true positives
+    computation: see
+    [Open Images evaluation protocols](evaluation_protocols.md),
+    oid_V2_detection_metrics.
+[^3]: Non-face boxes are dropped during training and non-face groundtruth boxes
+    are ignored when evaluating.
+[^4]: This is Open Images Challenge metric: see
+    [Open Images evaluation protocols](evaluation_protocols.md),
+    oid_challenge_detection_metrics.
diff --git a/research/object_detection/inference/detection_inference_test.py b/research/object_detection/inference/detection_inference_tf1_test.py similarity index 98% rename from research/object_detection/inference/detection_inference_test.py rename to research/object_detection/inference/detection_inference_tf1_test.py index 6d35f2b68..899da1298 100644 --- a/research/object_detection/inference/detection_inference_test.py +++ b/research/object_detection/inference/detection_inference_tf1_test.py @@ -15,7 +15,7 @@ r"""Tests for detection_inference.py.""" import os - +import unittest import numpy as np from PIL import Image import six @@ -25,6 +25,7 @@ from google.protobuf import text_format from object_detection.core import standard_fields from object_detection.inference import detection_inference from object_detection.utils import dataset_util +from object_detection.utils import tf_version def get_mock_tfrecord_path(): @@ -74,6 +75,7 @@ def create_mock_graph(): fl.write(graph_def.SerializeToString()) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class InferDetectionsTests(tf.test.TestCase): def test_simple(self): diff --git a/research/object_detection/inputs.py b/research/object_detection/inputs.py index 7512a56b1..a3eb2f0bd 100644 --- a/research/object_detection/inputs.py +++ b/research/object_detection/inputs.py @@ -64,7 +64,6 @@ def _multiclass_scores_or_one_hot_labels(multiclass_scores, [tf.shape(groundtruth_boxes)[0], num_classes]) def false_fn(): return tf.one_hot(groundtruth_classes, num_classes) - return tf.cond(tf.size(multiclass_scores) > 0, true_fn, false_fn) @@ -1006,14 +1005,21 @@ def get_reduce_to_frame_fn(input_reader_config, is_training): `reduce_to_frame_fn` for the dataset builder """ if input_reader_config.input_type != ( - input_reader_pb2.InputType.TF_SEQUENCE_EXAMPLE): - return lambda d: d + input_reader_pb2.InputType.Value('TF_SEQUENCE_EXAMPLE')): + return lambda dataset, dataset_map_fn, batch_size, config: dataset else: - def reduce_to_frame(dataset): + def reduce_to_frame(dataset, dataset_map_fn, batch_size, + input_reader_config): """Returns a function reducing sequence tensors to single frame tensors. Args: dataset: A tf dataset containing sequence tensors. + dataset_map_fn: A function that handles whether to + map_with_legacy_function for this dataset + batch_size: used if map_with_legacy_function is true to determine + num_parallel_calls + input_reader_config: used if map_with_legacy_function is true to + determine num_parallel_calls Returns: A tf dataset containing single frame tensors. @@ -1046,13 +1052,14 @@ def get_reduce_to_frame_fn(input_reader_config, is_training): # Copy all context tensors. out_tensor_dict[key] = tensor_dict[key] return out_tensor_dict - dataset = dataset.map(get_single_frame, tf.data.experimental.AUTOTUNE) + dataset = dataset_map_fn(dataset, get_single_frame, batch_size, + input_reader_config) else: - dataset = dataset.map(util_ops.tile_context_tensors, - tf.data.experimental.AUTOTUNE) + dataset = dataset_map_fn(dataset, util_ops.tile_context_tensors, + batch_size, input_reader_config) dataset = dataset.unbatch() # Decode frame here as SequenceExample tensors contain encoded images. 
- dataset = dataset.map(util_ops.decode_image, - tf.data.experimental.AUTOTUNE) + dataset = dataset_map_fn(dataset, util_ops.decode_image, batch_size, + input_reader_config) return dataset return reduce_to_frame diff --git a/research/object_detection/inputs_test.py b/research/object_detection/inputs_test.py index 78e268b25..1fca6538f 100644 --- a/research/object_detection/inputs_test.py +++ b/research/object_detection/inputs_test.py @@ -20,10 +20,11 @@ from __future__ import print_function import functools import os +import unittest from absl import logging from absl.testing import parameterized - import numpy as np +import six import tensorflow.compat.v1 as tf from object_detection import inputs @@ -31,6 +32,13 @@ from object_detection.core import preprocessor from object_detection.core import standard_fields as fields from object_detection.utils import config_util from object_detection.utils import test_case +from object_detection.utils import test_utils +from object_detection.utils import tf_version + +if six.PY2: + import mock # pylint: disable=g-import-not-at-top +else: + from unittest import mock # pylint: disable=g-import-not-at-top, g-importing-member FLAGS = tf.flags.FLAGS @@ -86,7 +94,8 @@ def _make_initializable_iterator(dataset): return iterator -class InputsTest(test_case.TestCase, parameterized.TestCase): +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only tests under TF2.X.') +class InputFnTest(test_case.TestCase, parameterized.TestCase): def test_faster_rcnn_resnet50_train_input(self): """Tests the training input function for FasterRcnnResnet50.""" @@ -402,7 +411,7 @@ class InputsTest(test_case.TestCase, parameterized.TestCase): def test_ssd_inceptionV2_eval_input_with_additional_channels( self, eval_batch_size=1): - """Tests the eval input function for SSDInceptionV2 with additional channels. + """Tests the eval input function for SSDInceptionV2 with additional channel. Args: eval_batch_size: Batch size for eval set. @@ -638,24 +647,20 @@ class DataAugmentationFnTest(test_case.TestCase): data_augmentation_fn = functools.partial( inputs.augment_input_data, data_augmentation_options=data_augmentation_options) - tensor_dict = { - fields.InputDataFields.image: - tf.constant(np.random.rand(10, 10, 3).astype(np.float32)), - fields.InputDataFields.groundtruth_boxes: - tf.constant(np.array([[.5, .5, 1., 1.]], np.float32)) - } - augmented_tensor_dict = data_augmentation_fn(tensor_dict=tensor_dict) - with self.test_session() as sess: - augmented_tensor_dict_out = sess.run(augmented_tensor_dict) - - self.assertAllEqual( - augmented_tensor_dict_out[fields.InputDataFields.image].shape, - [20, 20, 3] - ) - self.assertAllClose( - augmented_tensor_dict_out[fields.InputDataFields.groundtruth_boxes], - [[10, 10, 20, 20]] - ) + def graph_fn(): + tensor_dict = { + fields.InputDataFields.image: + tf.constant(np.random.rand(10, 10, 3).astype(np.float32)), + fields.InputDataFields.groundtruth_boxes: + tf.constant(np.array([[.5, .5, 1., 1.]], np.float32)) + } + augmented_tensor_dict = data_augmentation_fn(tensor_dict=tensor_dict) + return (augmented_tensor_dict[fields.InputDataFields.image], + augmented_tensor_dict[fields.InputDataFields. 
+ groundtruth_boxes]) + image, groundtruth_boxes = self.execute_cpu(graph_fn, []) + self.assertAllEqual(image.shape, [20, 20, 3]) + self.assertAllClose(groundtruth_boxes, [[10, 10, 20, 20]]) def test_apply_image_and_box_augmentation_with_scores(self): data_augmentation_options = [ @@ -669,37 +674,28 @@ class DataAugmentationFnTest(test_case.TestCase): data_augmentation_fn = functools.partial( inputs.augment_input_data, data_augmentation_options=data_augmentation_options) - tensor_dict = { - fields.InputDataFields.image: - tf.constant(np.random.rand(10, 10, 3).astype(np.float32)), - fields.InputDataFields.groundtruth_boxes: - tf.constant(np.array([[.5, .5, 1., 1.]], np.float32)), - fields.InputDataFields.groundtruth_classes: - tf.constant(np.array([1.0], np.float32)), - fields.InputDataFields.groundtruth_weights: - tf.constant(np.array([0.8], np.float32)), - } - augmented_tensor_dict = data_augmentation_fn(tensor_dict=tensor_dict) - with self.test_session() as sess: - augmented_tensor_dict_out = sess.run(augmented_tensor_dict) - - self.assertAllEqual( - augmented_tensor_dict_out[fields.InputDataFields.image].shape, - [20, 20, 3] - ) - self.assertAllClose( - augmented_tensor_dict_out[fields.InputDataFields.groundtruth_boxes], - [[10, 10, 20, 20]] - ) - self.assertAllClose( - augmented_tensor_dict_out[fields.InputDataFields.groundtruth_classes], - [1.0] - ) - self.assertAllClose( - augmented_tensor_dict_out[ - fields.InputDataFields.groundtruth_weights], - [0.8] - ) + def graph_fn(): + tensor_dict = { + fields.InputDataFields.image: + tf.constant(np.random.rand(10, 10, 3).astype(np.float32)), + fields.InputDataFields.groundtruth_boxes: + tf.constant(np.array([[.5, .5, 1., 1.]], np.float32)), + fields.InputDataFields.groundtruth_classes: + tf.constant(np.array([1.0], np.float32)), + fields.InputDataFields.groundtruth_weights: + tf.constant(np.array([0.8], np.float32)), + } + augmented_tensor_dict = data_augmentation_fn(tensor_dict=tensor_dict) + return (augmented_tensor_dict[fields.InputDataFields.image], + augmented_tensor_dict[fields.InputDataFields.groundtruth_boxes], + augmented_tensor_dict[fields.InputDataFields.groundtruth_classes], + augmented_tensor_dict[fields.InputDataFields.groundtruth_weights]) + (image, groundtruth_boxes, + groundtruth_classes, groundtruth_weights) = self.execute_cpu(graph_fn, []) + self.assertAllEqual(image.shape, [20, 20, 3]) + self.assertAllClose(groundtruth_boxes, [[10, 10, 20, 20]]) + self.assertAllClose(groundtruth_classes.shape, [1.0]) + self.assertAllClose(groundtruth_weights, [0.8]) def test_include_masks_in_data_augmentation(self): data_augmentation_options = [ @@ -712,21 +708,20 @@ class DataAugmentationFnTest(test_case.TestCase): data_augmentation_fn = functools.partial( inputs.augment_input_data, data_augmentation_options=data_augmentation_options) - tensor_dict = { - fields.InputDataFields.image: - tf.constant(np.random.rand(10, 10, 3).astype(np.float32)), - fields.InputDataFields.groundtruth_instance_masks: - tf.constant(np.zeros([2, 10, 10], np.uint8)) - } - augmented_tensor_dict = data_augmentation_fn(tensor_dict=tensor_dict) - with self.test_session() as sess: - augmented_tensor_dict_out = sess.run(augmented_tensor_dict) - - self.assertAllEqual( - augmented_tensor_dict_out[fields.InputDataFields.image].shape, - [20, 20, 3]) - self.assertAllEqual(augmented_tensor_dict_out[ - fields.InputDataFields.groundtruth_instance_masks].shape, [2, 20, 20]) + def graph_fn(): + tensor_dict = { + fields.InputDataFields.image: + tf.constant(np.random.rand(10, 10, 
3).astype(np.float32)), + fields.InputDataFields.groundtruth_instance_masks: + tf.constant(np.zeros([2, 10, 10], np.uint8)) + } + augmented_tensor_dict = data_augmentation_fn(tensor_dict=tensor_dict) + return (augmented_tensor_dict[fields.InputDataFields.image], + augmented_tensor_dict[fields.InputDataFields. + groundtruth_instance_masks]) + image, masks = self.execute_cpu(graph_fn, []) + self.assertAllEqual(image.shape, [20, 20, 3]) + self.assertAllEqual(masks.shape, [2, 20, 20]) def test_include_keypoints_in_data_augmentation(self): data_augmentation_options = [ @@ -740,30 +735,24 @@ class DataAugmentationFnTest(test_case.TestCase): data_augmentation_fn = functools.partial( inputs.augment_input_data, data_augmentation_options=data_augmentation_options) - tensor_dict = { - fields.InputDataFields.image: - tf.constant(np.random.rand(10, 10, 3).astype(np.float32)), - fields.InputDataFields.groundtruth_boxes: - tf.constant(np.array([[.5, .5, 1., 1.]], np.float32)), - fields.InputDataFields.groundtruth_keypoints: - tf.constant(np.array([[[0.5, 1.0], [0.5, 0.5]]], np.float32)) - } - augmented_tensor_dict = data_augmentation_fn(tensor_dict=tensor_dict) - with self.test_session() as sess: - augmented_tensor_dict_out = sess.run(augmented_tensor_dict) - - self.assertAllEqual( - augmented_tensor_dict_out[fields.InputDataFields.image].shape, - [20, 20, 3] - ) - self.assertAllClose( - augmented_tensor_dict_out[fields.InputDataFields.groundtruth_boxes], - [[10, 10, 20, 20]] - ) - self.assertAllClose( - augmented_tensor_dict_out[fields.InputDataFields.groundtruth_keypoints], - [[[10, 20], [10, 10]]] - ) + def graph_fn(): + tensor_dict = { + fields.InputDataFields.image: + tf.constant(np.random.rand(10, 10, 3).astype(np.float32)), + fields.InputDataFields.groundtruth_boxes: + tf.constant(np.array([[.5, .5, 1., 1.]], np.float32)), + fields.InputDataFields.groundtruth_keypoints: + tf.constant(np.array([[[0.5, 1.0], [0.5, 0.5]]], np.float32)) + } + augmented_tensor_dict = data_augmentation_fn(tensor_dict=tensor_dict) + return (augmented_tensor_dict[fields.InputDataFields.image], + augmented_tensor_dict[fields.InputDataFields.groundtruth_boxes], + augmented_tensor_dict[fields.InputDataFields. 
+ groundtruth_keypoints]) + image, boxes, keypoints = self.execute_cpu(graph_fn, []) + self.assertAllEqual(image.shape, [20, 20, 3]) + self.assertAllClose(boxes, [[10, 10, 20, 20]]) + self.assertAllClose(keypoints, [[[10, 20], [10, 10]]]) def _fake_model_preprocessor_fn(image): @@ -787,85 +776,82 @@ class DataTransformationFnTest(test_case.TestCase, parameterized.TestCase): def test_combine_additional_channels_if_present(self): image = np.random.rand(4, 4, 3).astype(np.float32) additional_channels = np.random.rand(4, 4, 2).astype(np.float32) - tensor_dict = { - fields.InputDataFields.image: - tf.constant(image), - fields.InputDataFields.image_additional_channels: - tf.constant(additional_channels), - fields.InputDataFields.groundtruth_classes: - tf.constant(np.array([1, 1], np.int32)) - } + def graph_fn(image, additional_channels): + tensor_dict = { + fields.InputDataFields.image: image, + fields.InputDataFields.image_additional_channels: additional_channels, + fields.InputDataFields.groundtruth_classes: + tf.constant([1, 1], tf.int32) + } - input_transformation_fn = functools.partial( - inputs.transform_input_data, - model_preprocess_fn=_fake_model_preprocessor_fn, - image_resizer_fn=_fake_image_resizer_fn, - num_classes=1) - with self.test_session() as sess: - transformed_inputs = sess.run( - input_transformation_fn(tensor_dict=tensor_dict)) - self.assertAllEqual(transformed_inputs[fields.InputDataFields.image].dtype, - tf.float32) - self.assertAllEqual(transformed_inputs[fields.InputDataFields.image].shape, - [4, 4, 5]) - self.assertAllClose(transformed_inputs[fields.InputDataFields.image], - np.concatenate((image, additional_channels), axis=2)) + input_transformation_fn = functools.partial( + inputs.transform_input_data, + model_preprocess_fn=_fake_model_preprocessor_fn, + image_resizer_fn=_fake_image_resizer_fn, + num_classes=1) + out_tensors = input_transformation_fn(tensor_dict=tensor_dict) + return out_tensors[fields.InputDataFields.image] + out_image = self.execute_cpu(graph_fn, [image, additional_channels]) + self.assertAllEqual(out_image.dtype, tf.float32) + self.assertAllEqual(out_image.shape, [4, 4, 5]) + self.assertAllClose(out_image, np.concatenate((image, additional_channels), + axis=2)) def test_use_multiclass_scores_when_present(self): - image = np.random.rand(4, 4, 3).astype(np.float32) - tensor_dict = { - fields.InputDataFields.image: - tf.constant(image), - fields.InputDataFields.groundtruth_boxes: - tf.constant(np.array([[.5, .5, 1, 1], [.5, .5, 1, 1]], np.float32)), - fields.InputDataFields.multiclass_scores: - tf.constant(np.array([0.2, 0.3, 0.5, 0.1, 0.6, 0.3], np.float32)), - fields.InputDataFields.groundtruth_classes: - tf.constant(np.array([1, 2], np.int32)) - } - - input_transformation_fn = functools.partial( - inputs.transform_input_data, - model_preprocess_fn=_fake_model_preprocessor_fn, - image_resizer_fn=_fake_image_resizer_fn, - num_classes=3, use_multiclass_scores=True) - with self.test_session() as sess: - transformed_inputs = sess.run( - input_transformation_fn(tensor_dict=tensor_dict)) + def graph_fn(): + tensor_dict = { + fields.InputDataFields.image: tf.constant(np.random.rand(4, 4, 3). 
+ astype(np.float32)), + fields.InputDataFields.groundtruth_boxes: + tf.constant(np.array([[.5, .5, 1, 1], [.5, .5, 1, 1]], + np.float32)), + fields.InputDataFields.multiclass_scores: + tf.constant(np.array([0.2, 0.3, 0.5, 0.1, 0.6, 0.3], np.float32)), + fields.InputDataFields.groundtruth_classes: + tf.constant(np.array([1, 2], np.int32)) + } + input_transformation_fn = functools.partial( + inputs.transform_input_data, + model_preprocess_fn=_fake_model_preprocessor_fn, + image_resizer_fn=_fake_image_resizer_fn, + num_classes=3, use_multiclass_scores=True) + transformed_inputs = input_transformation_fn(tensor_dict=tensor_dict) + return transformed_inputs[fields.InputDataFields.groundtruth_classes] + groundtruth_classes = self.execute_cpu(graph_fn, []) self.assertAllClose( np.array([[0.2, 0.3, 0.5], [0.1, 0.6, 0.3]], np.float32), - transformed_inputs[fields.InputDataFields.groundtruth_classes]) + groundtruth_classes) + @unittest.skipIf(tf_version.is_tf2(), ('Skipping due to different behaviour ' + 'in TF 2.X')) def test_use_multiclass_scores_when_not_present(self): - image = np.random.rand(4, 4, 3).astype(np.float32) - tensor_dict = { - fields.InputDataFields.image: - tf.constant(image), - fields.InputDataFields.groundtruth_boxes: - tf.constant(np.array([[.5, .5, 1, 1], [.5, .5, 1, 1]], np.float32)), - fields.InputDataFields.multiclass_scores: - tf.placeholder(tf.float32), - fields.InputDataFields.groundtruth_classes: - tf.constant(np.array([1, 2], np.int32)) - } + def graph_fn(): + zero_num_elements = tf.random.uniform([], minval=0, maxval=1, + dtype=tf.int32) + tensor_dict = { + fields.InputDataFields.image: + tf.constant(np.random.rand(4, 4, 3).astype(np.float32)), + fields.InputDataFields.groundtruth_boxes: + tf.constant(np.array([[.5, .5, 1, 1], [.5, .5, 1, 1]], + np.float32)), + fields.InputDataFields.multiclass_scores: tf.zeros(zero_num_elements), + fields.InputDataFields.groundtruth_classes: + tf.constant(np.array([1, 2], np.int32)) + } - input_transformation_fn = functools.partial( - inputs.transform_input_data, - model_preprocess_fn=_fake_model_preprocessor_fn, - image_resizer_fn=_fake_image_resizer_fn, - num_classes=3, use_multiclass_scores=True) - with self.test_session() as sess: - transformed_inputs = sess.run( - input_transformation_fn(tensor_dict=tensor_dict), - feed_dict={ - tensor_dict[fields.InputDataFields.multiclass_scores]: - np.array([], dtype=np.float32) - }) + input_transformation_fn = functools.partial( + inputs.transform_input_data, + model_preprocess_fn=_fake_model_preprocessor_fn, + image_resizer_fn=_fake_image_resizer_fn, + num_classes=3, use_multiclass_scores=True) + transformed_inputs = input_transformation_fn(tensor_dict=tensor_dict) + return transformed_inputs[fields.InputDataFields.groundtruth_classes] + groundtruth_classes = self.execute_cpu(graph_fn, []) self.assertAllClose( np.array([[0, 1, 0], [0, 0, 1]], np.float32), - transformed_inputs[fields.InputDataFields.groundtruth_classes]) + groundtruth_classes) @parameterized.parameters( {'labeled_classes': [1, 2]}, @@ -916,385 +902,395 @@ class DataTransformationFnTest(test_case.TestCase, parameterized.TestCase): transformed_inputs[fields.InputDataFields.groundtruth_labeled_classes]) def test_returns_correct_class_label_encodings(self): - tensor_dict = { - fields.InputDataFields.image: - tf.constant(np.random.rand(4, 4, 3).astype(np.float32)), - fields.InputDataFields.groundtruth_boxes: - tf.constant(np.array([[0, 0, 1, 1], [.5, .5, 1, 1]], np.float32)), - fields.InputDataFields.groundtruth_classes: - 
tf.constant(np.array([3, 1], np.int32)) - } - num_classes = 3 - input_transformation_fn = functools.partial( - inputs.transform_input_data, - model_preprocess_fn=_fake_model_preprocessor_fn, - image_resizer_fn=_fake_image_resizer_fn, - num_classes=num_classes) - with self.test_session() as sess: - transformed_inputs = sess.run( - input_transformation_fn(tensor_dict=tensor_dict)) - - self.assertAllClose( - transformed_inputs[fields.InputDataFields.groundtruth_classes], - [[0, 0, 1], [1, 0, 0]]) - self.assertAllClose( - transformed_inputs[fields.InputDataFields.groundtruth_confidences], - [[0, 0, 1], [1, 0, 0]]) + def graph_fn(): + tensor_dict = { + fields.InputDataFields.image: + tf.constant(np.random.rand(4, 4, 3).astype(np.float32)), + fields.InputDataFields.groundtruth_boxes: + tf.constant(np.array([[0, 0, 1, 1], [.5, .5, 1, 1]], np.float32)), + fields.InputDataFields.groundtruth_classes: + tf.constant(np.array([3, 1], np.int32)) + } + num_classes = 3 + input_transformation_fn = functools.partial( + inputs.transform_input_data, + model_preprocess_fn=_fake_model_preprocessor_fn, + image_resizer_fn=_fake_image_resizer_fn, + num_classes=num_classes) + transformed_inputs = input_transformation_fn(tensor_dict=tensor_dict) + return (transformed_inputs[fields.InputDataFields.groundtruth_classes], + transformed_inputs[fields.InputDataFields. + groundtruth_confidences]) + (groundtruth_classes, groundtruth_confidences) = self.execute_cpu(graph_fn, + []) + self.assertAllClose(groundtruth_classes, [[0, 0, 1], [1, 0, 0]]) + self.assertAllClose(groundtruth_confidences, [[0, 0, 1], [1, 0, 0]]) def test_returns_correct_labels_with_unrecognized_class(self): - tensor_dict = { - fields.InputDataFields.image: - tf.constant(np.random.rand(4, 4, 3).astype(np.float32)), - fields.InputDataFields.groundtruth_boxes: - tf.constant( - np.array([[0, 0, 1, 1], [.2, .2, 4, 4], [.5, .5, 1, 1]], - np.float32)), - fields.InputDataFields.groundtruth_area: - tf.constant(np.array([.5, .4, .3])), - fields.InputDataFields.groundtruth_classes: - tf.constant(np.array([3, -1, 1], np.int32)), - fields.InputDataFields.groundtruth_keypoints: - tf.constant( - np.array([[[.1, .1]], [[.2, .2]], [[.5, .5]]], - np.float32)), - fields.InputDataFields.groundtruth_keypoint_visibilities: - tf.constant([[True, True], [False, False], [True, True]]), - fields.InputDataFields.groundtruth_instance_masks: - tf.constant(np.random.rand(3, 4, 4).astype(np.float32)), - fields.InputDataFields.groundtruth_is_crowd: - tf.constant([False, True, False]), - fields.InputDataFields.groundtruth_difficult: - tf.constant(np.array([0, 0, 1], np.int32)) - } - - num_classes = 3 - input_transformation_fn = functools.partial( - inputs.transform_input_data, - model_preprocess_fn=_fake_model_preprocessor_fn, - image_resizer_fn=_fake_image_resizer_fn, - num_classes=num_classes) - with self.test_session() as sess: - transformed_inputs = sess.run( - input_transformation_fn(tensor_dict=tensor_dict)) + def graph_fn(): + tensor_dict = { + fields.InputDataFields.image: + tf.constant(np.random.rand(4, 4, 3).astype(np.float32)), + fields.InputDataFields.groundtruth_boxes: + tf.constant( + np.array([[0, 0, 1, 1], [.2, .2, 4, 4], [.5, .5, 1, 1]], + np.float32)), + fields.InputDataFields.groundtruth_area: + tf.constant(np.array([.5, .4, .3])), + fields.InputDataFields.groundtruth_classes: + tf.constant(np.array([3, -1, 1], np.int32)), + fields.InputDataFields.groundtruth_keypoints: + tf.constant( + np.array([[[.1, .1]], [[.2, .2]], [[.5, .5]]], + np.float32)), + 
fields.InputDataFields.groundtruth_keypoint_visibilities: + tf.constant([[True, True], [False, False], [True, True]]), + fields.InputDataFields.groundtruth_instance_masks: + tf.constant(np.random.rand(3, 4, 4).astype(np.float32)), + fields.InputDataFields.groundtruth_is_crowd: + tf.constant([False, True, False]), + fields.InputDataFields.groundtruth_difficult: + tf.constant(np.array([0, 0, 1], np.int32)) + } - self.assertAllClose( - transformed_inputs[fields.InputDataFields.groundtruth_classes], - [[0, 0, 1], [1, 0, 0]]) - self.assertAllEqual( - transformed_inputs[fields.InputDataFields.num_groundtruth_boxes], 2) - self.assertAllClose( - transformed_inputs[fields.InputDataFields.groundtruth_area], [.5, .3]) - self.assertAllEqual( - transformed_inputs[fields.InputDataFields.groundtruth_confidences], - [[0, 0, 1], [1, 0, 0]]) - self.assertAllClose( - transformed_inputs[fields.InputDataFields.groundtruth_boxes], - [[0, 0, 1, 1], [.5, .5, 1, 1]]) - self.assertAllClose( - transformed_inputs[fields.InputDataFields.groundtruth_keypoints], - [[[.1, .1]], [[.5, .5]]]) - self.assertAllEqual( - transformed_inputs[ - fields.InputDataFields.groundtruth_keypoint_visibilities], - [[True, True], [True, True]]) - self.assertAllEqual( - transformed_inputs[ - fields.InputDataFields.groundtruth_instance_masks].shape, [2, 4, 4]) - self.assertAllEqual( - transformed_inputs[fields.InputDataFields.groundtruth_is_crowd], - [False, False]) - self.assertAllEqual( - transformed_inputs[fields.InputDataFields.groundtruth_difficult], - [0, 1]) + num_classes = 3 + input_transformation_fn = functools.partial( + inputs.transform_input_data, + model_preprocess_fn=_fake_model_preprocessor_fn, + image_resizer_fn=_fake_image_resizer_fn, + num_classes=num_classes) + transformed_inputs = input_transformation_fn(tensor_dict) + return (transformed_inputs[fields.InputDataFields.groundtruth_classes], + transformed_inputs[fields.InputDataFields.num_groundtruth_boxes], + transformed_inputs[fields.InputDataFields.groundtruth_area], + transformed_inputs[fields.InputDataFields. + groundtruth_confidences], + transformed_inputs[fields.InputDataFields.groundtruth_boxes], + transformed_inputs[fields.InputDataFields.groundtruth_keypoints], + transformed_inputs[fields.InputDataFields. + groundtruth_keypoint_visibilities], + transformed_inputs[fields.InputDataFields. 
+ groundtruth_instance_masks], + transformed_inputs[fields.InputDataFields.groundtruth_is_crowd], + transformed_inputs[fields.InputDataFields.groundtruth_difficult]) + (groundtruth_classes, num_groundtruth_boxes, groundtruth_area, + groundtruth_confidences, groundtruth_boxes, groundtruth_keypoints, + groundtruth_keypoint_visibilities, groundtruth_instance_masks, + groundtruth_is_crowd, groundtruth_difficult) = self.execute_cpu(graph_fn, + []) + + self.assertAllClose(groundtruth_classes, [[0, 0, 1], [1, 0, 0]]) + self.assertAllEqual(num_groundtruth_boxes, 2) + self.assertAllClose(groundtruth_area, [.5, .3]) + self.assertAllEqual(groundtruth_confidences, [[0, 0, 1], [1, 0, 0]]) + self.assertAllClose(groundtruth_boxes, [[0, 0, 1, 1], [.5, .5, 1, 1]]) + self.assertAllClose(groundtruth_keypoints, [[[.1, .1]], [[.5, .5]]]) + self.assertAllEqual(groundtruth_keypoint_visibilities, + [[True, True], [True, True]]) + self.assertAllEqual(groundtruth_instance_masks.shape, [2, 4, 4]) + self.assertAllEqual(groundtruth_is_crowd, [False, False]) + self.assertAllEqual(groundtruth_difficult, [0, 1]) def test_returns_correct_merged_boxes(self): - tensor_dict = { - fields.InputDataFields.image: - tf.constant(np.random.rand(4, 4, 3).astype(np.float32)), - fields.InputDataFields.groundtruth_boxes: - tf.constant(np.array([[.5, .5, 1, 1], [.5, .5, 1, 1]], np.float32)), - fields.InputDataFields.groundtruth_classes: - tf.constant(np.array([3, 1], np.int32)) - } - - num_classes = 3 - input_transformation_fn = functools.partial( - inputs.transform_input_data, - model_preprocess_fn=_fake_model_preprocessor_fn, - image_resizer_fn=_fake_image_resizer_fn, - num_classes=num_classes, - merge_multiple_boxes=True) + def graph_fn(): + tensor_dict = { + fields.InputDataFields.image: + tf.constant(np.random.rand(4, 4, 3).astype(np.float32)), + fields.InputDataFields.groundtruth_boxes: + tf.constant(np.array([[.5, .5, 1, 1], [.5, .5, 1, 1]], + np.float32)), + fields.InputDataFields.groundtruth_classes: + tf.constant(np.array([3, 1], np.int32)) + } - with self.test_session() as sess: - transformed_inputs = sess.run( - input_transformation_fn(tensor_dict=tensor_dict)) + num_classes = 3 + input_transformation_fn = functools.partial( + inputs.transform_input_data, + model_preprocess_fn=_fake_model_preprocessor_fn, + image_resizer_fn=_fake_image_resizer_fn, + num_classes=num_classes, + merge_multiple_boxes=True) + transformed_inputs = input_transformation_fn(tensor_dict) + return (transformed_inputs[fields.InputDataFields.groundtruth_boxes], + transformed_inputs[fields.InputDataFields.groundtruth_classes], + transformed_inputs[fields.InputDataFields. 
+ groundtruth_confidences], + transformed_inputs[fields.InputDataFields.num_groundtruth_boxes]) + (groundtruth_boxes, groundtruth_classes, groundtruth_confidences, + num_groundtruth_boxes) = self.execute_cpu(graph_fn, []) self.assertAllClose( - transformed_inputs[fields.InputDataFields.groundtruth_boxes], + groundtruth_boxes, [[.5, .5, 1., 1.]]) self.assertAllClose( - transformed_inputs[fields.InputDataFields.groundtruth_classes], + groundtruth_classes, [[1, 0, 1]]) self.assertAllClose( - transformed_inputs[fields.InputDataFields.groundtruth_confidences], + groundtruth_confidences, [[1, 0, 1]]) self.assertAllClose( - transformed_inputs[fields.InputDataFields.num_groundtruth_boxes], + num_groundtruth_boxes, 1) def test_returns_correct_groundtruth_confidences_when_input_present(self): - tensor_dict = { - fields.InputDataFields.image: - tf.constant(np.random.rand(4, 4, 3).astype(np.float32)), - fields.InputDataFields.groundtruth_boxes: - tf.constant(np.array([[0, 0, 1, 1], [.5, .5, 1, 1]], np.float32)), - fields.InputDataFields.groundtruth_classes: - tf.constant(np.array([3, 1], np.int32)), - fields.InputDataFields.groundtruth_confidences: - tf.constant(np.array([1.0, -1.0], np.float32)) - } - num_classes = 3 - input_transformation_fn = functools.partial( - inputs.transform_input_data, - model_preprocess_fn=_fake_model_preprocessor_fn, - image_resizer_fn=_fake_image_resizer_fn, - num_classes=num_classes) - with self.test_session() as sess: - transformed_inputs = sess.run( - input_transformation_fn(tensor_dict=tensor_dict)) - + def graph_fn(): + tensor_dict = { + fields.InputDataFields.image: + tf.constant(np.random.rand(4, 4, 3).astype(np.float32)), + fields.InputDataFields.groundtruth_boxes: + tf.constant(np.array([[0, 0, 1, 1], [.5, .5, 1, 1]], np.float32)), + fields.InputDataFields.groundtruth_classes: + tf.constant(np.array([3, 1], np.int32)), + fields.InputDataFields.groundtruth_confidences: + tf.constant(np.array([1.0, -1.0], np.float32)) + } + num_classes = 3 + input_transformation_fn = functools.partial( + inputs.transform_input_data, + model_preprocess_fn=_fake_model_preprocessor_fn, + image_resizer_fn=_fake_image_resizer_fn, + num_classes=num_classes) + transformed_inputs = input_transformation_fn(tensor_dict) + return (transformed_inputs[fields.InputDataFields.groundtruth_classes], + transformed_inputs[fields.InputDataFields. 
+ groundtruth_confidences]) + groundtruth_classes, groundtruth_confidences = self.execute_cpu(graph_fn, + []) self.assertAllClose( - transformed_inputs[fields.InputDataFields.groundtruth_classes], + groundtruth_classes, [[0, 0, 1], [1, 0, 0]]) self.assertAllClose( - transformed_inputs[fields.InputDataFields.groundtruth_confidences], + groundtruth_confidences, [[0, 0, 1], [-1, 0, 0]]) def test_returns_resized_masks(self): - tensor_dict = { - fields.InputDataFields.image: - tf.constant(np.random.rand(4, 4, 3).astype(np.float32)), - fields.InputDataFields.groundtruth_instance_masks: - tf.constant(np.random.rand(2, 4, 4).astype(np.float32)), - fields.InputDataFields.groundtruth_classes: - tf.constant(np.array([3, 1], np.int32)), - fields.InputDataFields.original_image_spatial_shape: - tf.constant(np.array([4, 4], np.int32)) - } + def graph_fn(): + tensor_dict = { + fields.InputDataFields.image: + tf.constant(np.random.rand(4, 4, 3).astype(np.float32)), + fields.InputDataFields.groundtruth_instance_masks: + tf.constant(np.random.rand(2, 4, 4).astype(np.float32)), + fields.InputDataFields.groundtruth_classes: + tf.constant(np.array([3, 1], np.int32)), + fields.InputDataFields.original_image_spatial_shape: + tf.constant(np.array([4, 4], np.int32)) + } - def fake_image_resizer_fn(image, masks=None): - resized_image = tf.image.resize_images(image, [8, 8]) - results = [resized_image] - if masks is not None: - resized_masks = tf.transpose( - tf.image.resize_images(tf.transpose(masks, [1, 2, 0]), [8, 8]), - [2, 0, 1]) - results.append(resized_masks) - results.append(tf.shape(resized_image)) - return results - - num_classes = 3 - input_transformation_fn = functools.partial( - inputs.transform_input_data, - model_preprocess_fn=_fake_model_preprocessor_fn, - image_resizer_fn=fake_image_resizer_fn, - num_classes=num_classes, - retain_original_image=True) - with self.test_session() as sess: - transformed_inputs = sess.run( - input_transformation_fn(tensor_dict=tensor_dict)) - self.assertAllEqual(transformed_inputs[ - fields.InputDataFields.original_image].dtype, tf.uint8) - self.assertAllEqual(transformed_inputs[ - fields.InputDataFields.original_image_spatial_shape], [4, 4]) - self.assertAllEqual(transformed_inputs[ - fields.InputDataFields.original_image].shape, [8, 8, 3]) - self.assertAllEqual(transformed_inputs[ - fields.InputDataFields.groundtruth_instance_masks].shape, [2, 8, 8]) + def fake_image_resizer_fn(image, masks=None): + resized_image = tf.image.resize_images(image, [8, 8]) + results = [resized_image] + if masks is not None: + resized_masks = tf.transpose( + tf.image.resize_images(tf.transpose(masks, [1, 2, 0]), [8, 8]), + [2, 0, 1]) + results.append(resized_masks) + results.append(tf.shape(resized_image)) + return results + + num_classes = 3 + input_transformation_fn = functools.partial( + inputs.transform_input_data, + model_preprocess_fn=_fake_model_preprocessor_fn, + image_resizer_fn=fake_image_resizer_fn, + num_classes=num_classes, + retain_original_image=True) + transformed_inputs = input_transformation_fn(tensor_dict) + return (transformed_inputs[fields.InputDataFields.original_image], + transformed_inputs[fields.InputDataFields. + original_image_spatial_shape], + transformed_inputs[fields.InputDataFields. 
+ groundtruth_instance_masks]) + (original_image, original_image_shape, + groundtruth_instance_masks) = self.execute_cpu(graph_fn, []) + self.assertEqual(original_image.dtype, np.uint8) + self.assertAllEqual(original_image_shape, [4, 4]) + self.assertAllEqual(original_image.shape, [8, 8, 3]) + self.assertAllEqual(groundtruth_instance_masks.shape, [2, 8, 8]) def test_applies_model_preprocess_fn_to_image_tensor(self): np_image = np.random.randint(256, size=(4, 4, 3)) - tensor_dict = { - fields.InputDataFields.image: - tf.constant(np_image), - fields.InputDataFields.groundtruth_classes: - tf.constant(np.array([3, 1], np.int32)) - } - - def fake_model_preprocessor_fn(image): - return (image / 255., tf.expand_dims(tf.shape(image)[1:], axis=0)) + def graph_fn(image): + tensor_dict = { + fields.InputDataFields.image: image, + fields.InputDataFields.groundtruth_classes: + tf.constant(np.array([3, 1], np.int32)) + } - num_classes = 3 - input_transformation_fn = functools.partial( - inputs.transform_input_data, - model_preprocess_fn=fake_model_preprocessor_fn, - image_resizer_fn=_fake_image_resizer_fn, - num_classes=num_classes) + def fake_model_preprocessor_fn(image): + return (image / 255., tf.expand_dims(tf.shape(image)[1:], axis=0)) - with self.test_session() as sess: - transformed_inputs = sess.run( - input_transformation_fn(tensor_dict=tensor_dict)) - self.assertAllClose(transformed_inputs[fields.InputDataFields.image], - np_image / 255.) - self.assertAllClose(transformed_inputs[fields.InputDataFields. - true_image_shape], - [4, 4, 3]) + num_classes = 3 + input_transformation_fn = functools.partial( + inputs.transform_input_data, + model_preprocess_fn=fake_model_preprocessor_fn, + image_resizer_fn=_fake_image_resizer_fn, + num_classes=num_classes) + transformed_inputs = input_transformation_fn(tensor_dict) + return (transformed_inputs[fields.InputDataFields.image], + transformed_inputs[fields.InputDataFields.true_image_shape]) + image, true_image_shape = self.execute_cpu(graph_fn, [np_image]) + self.assertAllClose(image, np_image / 255.) 
+ self.assertAllClose(true_image_shape, [4, 4, 3]) def test_applies_data_augmentation_fn_to_tensor_dict(self): np_image = np.random.randint(256, size=(4, 4, 3)) - tensor_dict = { - fields.InputDataFields.image: - tf.constant(np_image), - fields.InputDataFields.groundtruth_classes: - tf.constant(np.array([3, 1], np.int32)) - } - - def add_one_data_augmentation_fn(tensor_dict): - return {key: value + 1 for key, value in tensor_dict.items()} + def graph_fn(image): + tensor_dict = { + fields.InputDataFields.image: image, + fields.InputDataFields.groundtruth_classes: + tf.constant(np.array([3, 1], np.int32)) + } - num_classes = 4 - input_transformation_fn = functools.partial( - inputs.transform_input_data, - model_preprocess_fn=_fake_model_preprocessor_fn, - image_resizer_fn=_fake_image_resizer_fn, - num_classes=num_classes, - data_augmentation_fn=add_one_data_augmentation_fn) - with self.test_session() as sess: - augmented_tensor_dict = sess.run( - input_transformation_fn(tensor_dict=tensor_dict)) + def add_one_data_augmentation_fn(tensor_dict): + return {key: value + 1 for key, value in tensor_dict.items()} - self.assertAllEqual(augmented_tensor_dict[fields.InputDataFields.image], - np_image + 1) - self.assertAllEqual( - augmented_tensor_dict[fields.InputDataFields.groundtruth_classes], + num_classes = 4 + input_transformation_fn = functools.partial( + inputs.transform_input_data, + model_preprocess_fn=_fake_model_preprocessor_fn, + image_resizer_fn=_fake_image_resizer_fn, + num_classes=num_classes, + data_augmentation_fn=add_one_data_augmentation_fn) + transformed_inputs = input_transformation_fn(tensor_dict) + return (transformed_inputs[fields.InputDataFields.image], + transformed_inputs[fields.InputDataFields.groundtruth_classes]) + image, groundtruth_classes = self.execute_cpu(graph_fn, [np_image]) + self.assertAllEqual(image, np_image + 1) + self.assertAllEqual( + groundtruth_classes, [[0, 0, 0, 1], [0, 1, 0, 0]]) def test_applies_data_augmentation_fn_before_model_preprocess_fn(self): np_image = np.random.randint(256, size=(4, 4, 3)) - tensor_dict = { - fields.InputDataFields.image: - tf.constant(np_image), - fields.InputDataFields.groundtruth_classes: - tf.constant(np.array([3, 1], np.int32)) - } - - def mul_two_model_preprocessor_fn(image): - return (image * 2, tf.expand_dims(tf.shape(image)[1:], axis=0)) + def graph_fn(image): + tensor_dict = { + fields.InputDataFields.image: image, + fields.InputDataFields.groundtruth_classes: + tf.constant(np.array([3, 1], np.int32)) + } - def add_five_to_image_data_augmentation_fn(tensor_dict): - tensor_dict[fields.InputDataFields.image] += 5 - return tensor_dict + def mul_two_model_preprocessor_fn(image): + return (image * 2, tf.expand_dims(tf.shape(image)[1:], axis=0)) - num_classes = 4 - input_transformation_fn = functools.partial( - inputs.transform_input_data, - model_preprocess_fn=mul_two_model_preprocessor_fn, - image_resizer_fn=_fake_image_resizer_fn, - num_classes=num_classes, - data_augmentation_fn=add_five_to_image_data_augmentation_fn) - with self.test_session() as sess: - augmented_tensor_dict = sess.run( - input_transformation_fn(tensor_dict=tensor_dict)) + def add_five_to_image_data_augmentation_fn(tensor_dict): + tensor_dict[fields.InputDataFields.image] += 5 + return tensor_dict - self.assertAllEqual(augmented_tensor_dict[fields.InputDataFields.image], - (np_image + 5) * 2) + num_classes = 4 + input_transformation_fn = functools.partial( + inputs.transform_input_data, + model_preprocess_fn=mul_two_model_preprocessor_fn, + 
image_resizer_fn=_fake_image_resizer_fn, + num_classes=num_classes, + data_augmentation_fn=add_five_to_image_data_augmentation_fn) + transformed_inputs = input_transformation_fn(tensor_dict) + return transformed_inputs[fields.InputDataFields.image] + image = self.execute_cpu(graph_fn, [np_image]) + self.assertAllEqual(image, (np_image + 5) * 2) def test_resize_with_padding(self): + def graph_fn(): + tensor_dict = { + fields.InputDataFields.image: + tf.constant(np.random.rand(100, 50, 3).astype(np.float32)), + fields.InputDataFields.groundtruth_boxes: + tf.constant(np.array([[.5, .5, 1, 1], [.0, .0, .5, .5]], + np.float32)), + fields.InputDataFields.groundtruth_classes: + tf.constant(np.array([1, 2], np.int32)), + fields.InputDataFields.groundtruth_keypoints: + tf.constant([[[0.1, 0.2]], [[0.3, 0.4]]]), + } - tensor_dict = { - fields.InputDataFields.image: - tf.constant(np.random.rand(100, 50, 3).astype(np.float32)), - fields.InputDataFields.groundtruth_boxes: - tf.constant(np.array([[.5, .5, 1, 1], [.0, .0, .5, .5]], - np.float32)), - fields.InputDataFields.groundtruth_classes: - tf.constant(np.array([1, 2], np.int32)), - fields.InputDataFields.groundtruth_keypoints: - tf.constant([[[0.1, 0.2]], [[0.3, 0.4]]]), - } - - num_classes = 3 - input_transformation_fn = functools.partial( - inputs.transform_input_data, - model_preprocess_fn=_fake_resize50_preprocess_fn, - image_resizer_fn=_fake_image_resizer_fn, - num_classes=num_classes,) - - with self.test_session() as sess: - transformed_inputs = sess.run( - input_transformation_fn(tensor_dict=tensor_dict)) + num_classes = 3 + input_transformation_fn = functools.partial( + inputs.transform_input_data, + model_preprocess_fn=_fake_resize50_preprocess_fn, + image_resizer_fn=_fake_image_resizer_fn, + num_classes=num_classes,) + transformed_inputs = input_transformation_fn(tensor_dict) + return (transformed_inputs[fields.InputDataFields.groundtruth_boxes], + transformed_inputs[fields.InputDataFields.groundtruth_keypoints]) + groundtruth_boxes, groundtruth_keypoints = self.execute_cpu(graph_fn, []) self.assertAllClose( - transformed_inputs[fields.InputDataFields.groundtruth_boxes], + groundtruth_boxes, [[.5, .25, 1., .5], [.0, .0, .5, .25]]) self.assertAllClose( - transformed_inputs[fields.InputDataFields.groundtruth_keypoints], + groundtruth_keypoints, [[[.1, .1]], [[.3, .2]]]) def test_groundtruth_keypoint_weights(self): - tensor_dict = { - fields.InputDataFields.image: - tf.constant(np.random.rand(100, 50, 3).astype(np.float32)), - fields.InputDataFields.groundtruth_boxes: - tf.constant(np.array([[.5, .5, 1, 1], [.0, .0, .5, .5]], - np.float32)), - fields.InputDataFields.groundtruth_classes: - tf.constant(np.array([1, 2], np.int32)), - fields.InputDataFields.groundtruth_keypoints: - tf.constant([[[0.1, 0.2], [0.3, 0.4]], - [[0.5, 0.6], [0.7, 0.8]]]), - fields.InputDataFields.groundtruth_keypoint_visibilities: - tf.constant([[True, False], [True, True]]), - } - - num_classes = 3 - keypoint_type_weight = [1.0, 2.0] - input_transformation_fn = functools.partial( - inputs.transform_input_data, - model_preprocess_fn=_fake_resize50_preprocess_fn, - image_resizer_fn=_fake_image_resizer_fn, - num_classes=num_classes, - keypoint_type_weight=keypoint_type_weight) + def graph_fn(): + tensor_dict = { + fields.InputDataFields.image: + tf.constant(np.random.rand(100, 50, 3).astype(np.float32)), + fields.InputDataFields.groundtruth_boxes: + tf.constant(np.array([[.5, .5, 1, 1], [.0, .0, .5, .5]], + np.float32)), + fields.InputDataFields.groundtruth_classes: + 
tf.constant(np.array([1, 2], np.int32)), + fields.InputDataFields.groundtruth_keypoints: + tf.constant([[[0.1, 0.2], [0.3, 0.4]], + [[0.5, 0.6], [0.7, 0.8]]]), + fields.InputDataFields.groundtruth_keypoint_visibilities: + tf.constant([[True, False], [True, True]]), + } - with self.test_session() as sess: - transformed_inputs = sess.run( - input_transformation_fn(tensor_dict=tensor_dict)) + num_classes = 3 + keypoint_type_weight = [1.0, 2.0] + input_transformation_fn = functools.partial( + inputs.transform_input_data, + model_preprocess_fn=_fake_resize50_preprocess_fn, + image_resizer_fn=_fake_image_resizer_fn, + num_classes=num_classes, + keypoint_type_weight=keypoint_type_weight) + transformed_inputs = input_transformation_fn(tensor_dict=tensor_dict) + return (transformed_inputs[fields.InputDataFields.groundtruth_keypoints], + transformed_inputs[fields.InputDataFields. + groundtruth_keypoint_weights]) + + groundtruth_keypoints, groundtruth_keypoint_weights = self.execute_cpu( + graph_fn, []) self.assertAllClose( - transformed_inputs[fields.InputDataFields.groundtruth_keypoints], + groundtruth_keypoints, [[[0.1, 0.1], [0.3, 0.2]], [[0.5, 0.3], [0.7, 0.4]]]) self.assertAllClose( - transformed_inputs[fields.InputDataFields.groundtruth_keypoint_weights], + groundtruth_keypoint_weights, [[1.0, 0.0], [1.0, 2.0]]) def test_groundtruth_keypoint_weights_default(self): - tensor_dict = { - fields.InputDataFields.image: - tf.constant(np.random.rand(100, 50, 3).astype(np.float32)), - fields.InputDataFields.groundtruth_boxes: - tf.constant(np.array([[.5, .5, 1, 1], [.0, .0, .5, .5]], - np.float32)), - fields.InputDataFields.groundtruth_classes: - tf.constant(np.array([1, 2], np.int32)), - fields.InputDataFields.groundtruth_keypoints: - tf.constant([[[0.1, 0.2], [0.3, 0.4]], - [[0.5, 0.6], [0.7, 0.8]]]), - } - - num_classes = 3 - input_transformation_fn = functools.partial( - inputs.transform_input_data, - model_preprocess_fn=_fake_resize50_preprocess_fn, - image_resizer_fn=_fake_image_resizer_fn, - num_classes=num_classes) + def graph_fn(): + tensor_dict = { + fields.InputDataFields.image: + tf.constant(np.random.rand(100, 50, 3).astype(np.float32)), + fields.InputDataFields.groundtruth_boxes: + tf.constant(np.array([[.5, .5, 1, 1], [.0, .0, .5, .5]], + np.float32)), + fields.InputDataFields.groundtruth_classes: + tf.constant(np.array([1, 2], np.int32)), + fields.InputDataFields.groundtruth_keypoints: + tf.constant([[[0.1, 0.2], [0.3, 0.4]], + [[0.5, 0.6], [0.7, 0.8]]]), + } - with self.test_session() as sess: - transformed_inputs = sess.run( - input_transformation_fn(tensor_dict=tensor_dict)) + num_classes = 3 + input_transformation_fn = functools.partial( + inputs.transform_input_data, + model_preprocess_fn=_fake_resize50_preprocess_fn, + image_resizer_fn=_fake_image_resizer_fn, + num_classes=num_classes) + transformed_inputs = input_transformation_fn(tensor_dict=tensor_dict) + return (transformed_inputs[fields.InputDataFields.groundtruth_keypoints], + transformed_inputs[fields.InputDataFields. 
+ groundtruth_keypoint_weights]) + groundtruth_keypoints, groundtruth_keypoint_weights = self.execute_cpu( + graph_fn, []) self.assertAllClose( - transformed_inputs[fields.InputDataFields.groundtruth_keypoints], + groundtruth_keypoints, [[[0.1, 0.1], [0.3, 0.2]], [[0.5, 0.3], [0.7, 0.4]]]) self.assertAllClose( - transformed_inputs[fields.InputDataFields.groundtruth_keypoint_weights], + groundtruth_keypoint_weights, [[1.0, 1.0], [1.0, 1.0]]) @@ -1303,15 +1299,15 @@ class PadInputDataToStaticShapesFnTest(test_case.TestCase): def test_pad_images_boxes_and_classes(self): input_tensor_dict = { fields.InputDataFields.image: - tf.placeholder(tf.float32, [None, None, 3]), + tf.random.uniform([3, 3, 3]), fields.InputDataFields.groundtruth_boxes: - tf.placeholder(tf.float32, [None, 4]), + tf.random.uniform([2, 4]), fields.InputDataFields.groundtruth_classes: - tf.placeholder(tf.int32, [None, 3]), + tf.random.uniform([2, 3], minval=0, maxval=2, dtype=tf.int32), fields.InputDataFields.true_image_shape: - tf.placeholder(tf.int32, [3]), + tf.constant([3, 3, 3]), fields.InputDataFields.original_image_spatial_shape: - tf.placeholder(tf.int32, [2]) + tf.constant([3, 3]) } padded_tensor_dict = inputs.pad_input_data_to_static_shapes( tensor_dict=input_tensor_dict, @@ -1336,69 +1332,35 @@ class PadInputDataToStaticShapesFnTest(test_case.TestCase): .shape.as_list(), [3, 3]) def test_clip_boxes_and_classes(self): - input_tensor_dict = { - fields.InputDataFields.groundtruth_boxes: - tf.placeholder(tf.float32, [None, 4]), - fields.InputDataFields.groundtruth_classes: - tf.placeholder(tf.int32, [None, 3]), - fields.InputDataFields.num_groundtruth_boxes: - tf.placeholder(tf.int32, []) - } - padded_tensor_dict = inputs.pad_input_data_to_static_shapes( - tensor_dict=input_tensor_dict, - max_num_boxes=3, - num_classes=3, - spatial_image_shape=[5, 6]) - - self.assertAllEqual( - padded_tensor_dict[fields.InputDataFields.groundtruth_boxes] - .shape.as_list(), [3, 4]) - self.assertAllEqual( - padded_tensor_dict[fields.InputDataFields.groundtruth_classes] - .shape.as_list(), [3, 3]) - - with self.test_session() as sess: - out_tensor_dict = sess.run( - padded_tensor_dict, - feed_dict={ - input_tensor_dict[fields.InputDataFields.groundtruth_boxes]: - np.random.rand(5, 4), - input_tensor_dict[fields.InputDataFields.groundtruth_classes]: - np.random.rand(2, 3), - input_tensor_dict[fields.InputDataFields.num_groundtruth_boxes]: - 5, - }) - - self.assertAllEqual( - out_tensor_dict[fields.InputDataFields.groundtruth_boxes].shape, [3, 4]) - self.assertAllEqual( - out_tensor_dict[fields.InputDataFields.groundtruth_classes].shape, - [3, 3]) - self.assertEqual( - out_tensor_dict[fields.InputDataFields.num_groundtruth_boxes], - 3) - - def test_do_not_pad_dynamic_images(self): - input_tensor_dict = { - fields.InputDataFields.image: - tf.placeholder(tf.float32, [None, None, 3]), - } - padded_tensor_dict = inputs.pad_input_data_to_static_shapes( - tensor_dict=input_tensor_dict, - max_num_boxes=3, - num_classes=3, - spatial_image_shape=[None, None]) - - self.assertAllEqual( - padded_tensor_dict[fields.InputDataFields.image].shape.as_list(), - [None, None, 3]) + def graph_fn(): + input_tensor_dict = { + fields.InputDataFields.groundtruth_boxes: + tf.random.uniform([5, 4]), + fields.InputDataFields.groundtruth_classes: + tf.random.uniform([2, 3], maxval=10, dtype=tf.int32), + fields.InputDataFields.num_groundtruth_boxes: + tf.constant(5) + } + padded_tensor_dict = inputs.pad_input_data_to_static_shapes( + tensor_dict=input_tensor_dict, + 
max_num_boxes=3, + num_classes=3, + spatial_image_shape=[5, 6]) + return (padded_tensor_dict[fields.InputDataFields.groundtruth_boxes], + padded_tensor_dict[fields.InputDataFields.groundtruth_classes], + padded_tensor_dict[fields.InputDataFields.num_groundtruth_boxes]) + (groundtruth_boxes, groundtruth_classes, + num_groundtruth_boxes) = self.execute_cpu(graph_fn, []) + self.assertAllEqual(groundtruth_boxes.shape, [3, 4]) + self.assertAllEqual(groundtruth_classes.shape, [3, 3]) + self.assertEqual(num_groundtruth_boxes, 3) def test_images_and_additional_channels(self): input_tensor_dict = { fields.InputDataFields.image: - tf.placeholder(tf.float32, [None, None, 5]), + test_utils.image_with_dynamic_shape(4, 3, 5), fields.InputDataFields.image_additional_channels: - tf.placeholder(tf.float32, [None, None, 2]), + test_utils.image_with_dynamic_shape(4, 3, 2), } padded_tensor_dict = inputs.pad_input_data_to_static_shapes( tensor_dict=input_tensor_dict, @@ -1418,11 +1380,11 @@ class PadInputDataToStaticShapesFnTest(test_case.TestCase): def test_images_and_additional_channels_errors(self): input_tensor_dict = { fields.InputDataFields.image: - tf.placeholder(tf.float32, [None, None, 3]), + test_utils.image_with_dynamic_shape(10, 10, 3), fields.InputDataFields.image_additional_channels: - tf.placeholder(tf.float32, [None, None, 2]), + test_utils.image_with_dynamic_shape(10, 10, 2), fields.InputDataFields.original_image: - tf.placeholder(tf.float32, [None, None, 3]), + test_utils.image_with_dynamic_shape(10, 10, 3), } with self.assertRaises(ValueError): _ = inputs.pad_input_data_to_static_shapes( @@ -1434,7 +1396,7 @@ class PadInputDataToStaticShapesFnTest(test_case.TestCase): def test_gray_images(self): input_tensor_dict = { fields.InputDataFields.image: - tf.placeholder(tf.float32, [None, None, 1]), + test_utils.image_with_dynamic_shape(4, 4, 1), } padded_tensor_dict = inputs.pad_input_data_to_static_shapes( tensor_dict=input_tensor_dict, @@ -1449,9 +1411,9 @@ class PadInputDataToStaticShapesFnTest(test_case.TestCase): def test_gray_images_and_additional_channels(self): input_tensor_dict = { fields.InputDataFields.image: - tf.placeholder(tf.float32, [None, None, 3]), + test_utils.image_with_dynamic_shape(4, 4, 3), fields.InputDataFields.image_additional_channels: - tf.placeholder(tf.float32, [None, None, 2]), + test_utils.image_with_dynamic_shape(4, 4, 2), } # pad_input_data_to_static_shape assumes that image is already concatenated # with additional channels. 
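The hunks in this test file all apply the same migration: tensor construction moves into a local `graph_fn`, and `self.test_session()` / `sess.run` / `tf.placeholder` are replaced with `self.execute_cpu(graph_fn, inputs)`, which the shared `object_detection.utils.test_case.TestCase` can run as a TF1 graph or eagerly under TF2. A minimal sketch of the target pattern (the test name, op, and shapes below are illustrative only and not part of this patch):

```
import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.utils import test_case


class MigrationPatternTest(test_case.TestCase):

  def test_flip_runs_on_cpu(self):
    np_image = np.random.rand(4, 4, 3).astype(np.float32)

    def graph_fn(image):
      # All tensor/graph construction happens inside the closure.
      return tf.image.flip_left_right(image)

    # execute_cpu feeds the numpy inputs and returns numpy outputs, so the
    # same assertions work whether the test runs under TF1 or TF2.
    flipped = self.execute_cpu(graph_fn, [np_image])
    self.assertAllEqual(flipped.shape, [4, 4, 3])
```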
@@ -1469,11 +1431,14 @@ class PadInputDataToStaticShapesFnTest(test_case.TestCase): .shape.as_list(), [5, 6, 2]) def test_keypoints(self): + keypoints = test_utils.keypoints_with_dynamic_shape(10, 16, 4) + visibilities = tf.cast(tf.random.uniform(tf.shape(keypoints)[:-1], minval=0, + maxval=2, dtype=tf.int32), tf.bool) input_tensor_dict = { fields.InputDataFields.groundtruth_keypoints: - tf.placeholder(tf.float32, [None, 16, 4]), + test_utils.keypoints_with_dynamic_shape(10, 16, 4), fields.InputDataFields.groundtruth_keypoint_visibilities: - tf.placeholder(tf.bool, [None, 16]), + visibilities } padded_tensor_dict = inputs.pad_input_data_to_static_shapes( tensor_dict=input_tensor_dict, @@ -1493,39 +1458,76 @@ class PadInputDataToStaticShapesFnTest(test_case.TestCase): context_memory_size = 8 context_feature_length = 10 max_num_context_features = 20 - input_tensor_dict = { - fields.InputDataFields.context_features: - tf.placeholder(tf.float32, - [context_memory_size, context_feature_length]), - fields.InputDataFields.context_feature_length: - tf.placeholder(tf.float32, []) - } - padded_tensor_dict = inputs.pad_input_data_to_static_shapes( - tensor_dict=input_tensor_dict, - max_num_boxes=3, - num_classes=3, - spatial_image_shape=[5, 6], - max_num_context_features=max_num_context_features, - context_feature_length=context_feature_length) + def graph_fn(): + input_tensor_dict = { + fields.InputDataFields.context_features: + tf.ones([context_memory_size, context_feature_length]), + fields.InputDataFields.context_feature_length: + tf.constant(context_feature_length) + } + padded_tensor_dict = inputs.pad_input_data_to_static_shapes( + tensor_dict=input_tensor_dict, + max_num_boxes=3, + num_classes=3, + spatial_image_shape=[5, 6], + max_num_context_features=max_num_context_features, + context_feature_length=context_feature_length) - self.assertAllEqual( - padded_tensor_dict[ - fields.InputDataFields.context_features].shape.as_list(), - [max_num_context_features, context_feature_length]) + self.assertAllEqual( + padded_tensor_dict[ + fields.InputDataFields.context_features].shape.as_list(), + [max_num_context_features, context_feature_length]) + return padded_tensor_dict[fields.InputDataFields.valid_context_size] - with self.test_session() as sess: - feed_dict = { - input_tensor_dict[fields.InputDataFields.context_features]: - np.ones([context_memory_size, context_feature_length], - dtype=np.float32), - input_tensor_dict[fields.InputDataFields.context_feature_length]: - context_feature_length + valid_context_size = self.execute_cpu(graph_fn, []) + self.assertEqual(valid_context_size, context_memory_size) + + +class NegativeSizeTest(test_case.TestCase): + """Test for inputs and related funcitons.""" + + def test_negative_size_error(self): + """Test that error is raised for negative size boxes.""" + + def graph_fn(): + tensors = { + fields.InputDataFields.image: tf.zeros((128, 128, 3)), + fields.InputDataFields.groundtruth_classes: + tf.constant([1, 1], tf.int32), + fields.InputDataFields.groundtruth_boxes: + tf.constant([[0.5, 0.5, 0.4, 0.5]], tf.float32) } - padded_tensor_dict_out = sess.run(padded_tensor_dict, feed_dict=feed_dict) + tensors = inputs.transform_input_data( + tensors, _fake_model_preprocessor_fn, _fake_image_resizer_fn, + num_classes=10) + return tensors[fields.InputDataFields.groundtruth_boxes] + with self.assertRaises(tf.errors.InvalidArgumentError): + self.execute_cpu(graph_fn, []) + + def test_negative_size_no_assert(self): + """Test that negative size boxes are filtered out 
without assert. + + This test simulates the behaviour when we run on TPU and Assert ops are + not supported. + """ - self.assertEqual( - padded_tensor_dict_out[fields.InputDataFields.valid_context_size], - context_memory_size) + tensors = { + fields.InputDataFields.image: tf.zeros((128, 128, 3)), + fields.InputDataFields.groundtruth_classes: + tf.constant([1, 1], tf.int32), + fields.InputDataFields.groundtruth_boxes: + tf.constant([[0.5, 0.5, 0.4, 0.5], [0.5, 0.5, 0.6, 0.6]], + tf.float32) + } + + with mock.patch.object(tf, 'Assert') as tf_assert: + tf_assert.return_value = tf.no_op() + tensors = inputs.transform_input_data( + tensors, _fake_model_preprocessor_fn, _fake_image_resizer_fn, + num_classes=10) + + self.assertAllClose(tensors[fields.InputDataFields.groundtruth_boxes], + [[0.5, 0.5, 0.6, 0.6]]) if __name__ == '__main__': diff --git a/research/object_detection/legacy/trainer_test.py b/research/object_detection/legacy/trainer_tf1_test.py similarity index 98% rename from research/object_detection/legacy/trainer_test.py rename to research/object_detection/legacy/trainer_tf1_test.py index 3a5d07304..5b3f01c01 100644 --- a/research/object_detection/legacy/trainer_test.py +++ b/research/object_detection/legacy/trainer_tf1_test.py @@ -14,7 +14,7 @@ # ============================================================================== """Tests for object_detection.trainer.""" - +import unittest import tensorflow.compat.v1 as tf import tf_slim as slim from google.protobuf import text_format @@ -24,6 +24,7 @@ from object_detection.core import model from object_detection.core import standard_fields as fields from object_detection.legacy import trainer from object_detection.protos import train_pb2 +from object_detection.utils import tf_version NUMBER_OF_CLASSES = 2 @@ -197,6 +198,7 @@ class FakeDetectionModel(model.DetectionModel): pass +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class TrainerTest(tf.test.TestCase): def test_configure_trainer_and_train_two_steps(self): diff --git a/research/object_detection/matchers/bipartite_matcher_test.py b/research/object_detection/matchers/bipartite_matcher_tf1_test.py similarity index 94% rename from research/object_detection/matchers/bipartite_matcher_test.py rename to research/object_detection/matchers/bipartite_matcher_tf1_test.py index 1617cbbc3..314546ad4 100644 --- a/research/object_detection/matchers/bipartite_matcher_test.py +++ b/research/object_detection/matchers/bipartite_matcher_tf1_test.py @@ -14,14 +14,18 @@ # ============================================================================== """Tests for object_detection.core.bipartite_matcher.""" - +import unittest import numpy as np import tensorflow.compat.v1 as tf -from object_detection.matchers import bipartite_matcher from object_detection.utils import test_case +from object_detection.utils import tf_version + +if tf_version.is_tf1(): + from object_detection.matchers import bipartite_matcher # pylint: disable=g-import-not-at-top +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class GreedyBipartiteMatcherTest(test_case.TestCase): def test_get_expected_matches_when_all_rows_are_valid(self): diff --git a/research/object_detection/meta_architectures/center_net_meta_arch.py b/research/object_detection/meta_architectures/center_net_meta_arch.py new file mode 100644 index 000000000..8ae98bb1f --- /dev/null +++ b/research/object_detection/meta_architectures/center_net_meta_arch.py @@ -0,0 +1,2348 @@ +# Copyright 2020 The TensorFlow Authors. 
All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""The CenterNet meta architecture as described in the "Objects as Points" paper [1].
+
+[1]: https://arxiv.org/abs/1904.07850
+
+"""
+
+import abc
+import collections
+import functools
+import numpy as np
+import tensorflow.compat.v1 as tf
+import tensorflow.compat.v2 as tf2
+
+from object_detection.core import box_list
+from object_detection.core import box_list_ops
+from object_detection.core import keypoint_ops
+from object_detection.core import model
+from object_detection.core import standard_fields as fields
+from object_detection.core import target_assigner as cn_assigner
+from object_detection.utils import shape_utils
+
+# Number of channels needed to predict size and offsets.
+NUM_OFFSET_CHANNELS = 2
+NUM_SIZE_CHANNELS = 2
+
+# Error range for detecting peaks.
+PEAK_EPSILON = 1e-6
+
+# Constants shared between all keypoint tasks.
+UNMATCHED_KEYPOINT_SCORE = 0.1
+KEYPOINT_CANDIDATE_SEARCH_SCALE = 0.3
+
+
+class CenterNetFeatureExtractor(tf.keras.Model):
+  """Base class for feature extractors for the CenterNet meta architecture.
+
+  Child classes are expected to override the _output_model property which will
+  return 1 or more tensors predicted by the feature extractor.
+
+  """
+  __metaclass__ = abc.ABCMeta
+
+  def __init__(self, name=None, channel_means=(0., 0., 0.),
+               channel_stds=(1., 1., 1.), bgr_ordering=False):
+    """Initializes a CenterNet feature extractor.
+
+    Args:
+      name: str, the name used for the underlying keras model.
+      channel_means: A tuple of floats, denoting the mean of each channel
+        which will be subtracted from it. If None or empty, we use 0s.
+      channel_stds: A tuple of floats, denoting the standard deviation of each
+        channel. Each channel will be divided by its standard deviation value.
+        If None or empty, we use 1s.
+      bgr_ordering: bool, if set will change the channel ordering to be in the
+        [blue, green, red] order.
+    """
+    super(CenterNetFeatureExtractor, self).__init__(name=name)
+
+    if channel_means is None or len(channel_means) == 0:  # pylint:disable=g-explicit-length-test
+      channel_means = [0., 0., 0.]
+
+    if channel_stds is None or len(channel_stds) == 0:  # pylint:disable=g-explicit-length-test
+      channel_stds = [1., 1., 1.]
+
+    self._channel_means = channel_means
+    self._channel_stds = channel_stds
+    self._bgr_ordering = bgr_ordering
+
+  def preprocess(self, inputs):
+    """Converts a batch of unscaled images to a scale suitable for the model.
+
+    This method normalizes the image using the `channel_means` and
+    `channel_stds` values given at initialization time, while optionally
+    flipping the channel order if `bgr_ordering` is set.
+ + Args: + inputs: a [batch, height, width, channels] float32 tensor + + Returns: + outputs: a [batch, height, width, channels] float32 tensor + + """ + + if self._bgr_ordering: + red, green, blue = tf.unstack(inputs, axis=3) + inputs = tf.stack([blue, green, red], axis=3) + + channel_means = tf.reshape(tf.constant(self._channel_means), + [1, 1, 1, -1]) + channel_stds = tf.reshape(tf.constant(self._channel_stds), + [1, 1, 1, -1]) + + return (inputs - channel_means)/channel_stds + + @property + @abc.abstractmethod + def out_stride(self): + """The stride in the output image of the network.""" + pass + + @property + @abc.abstractmethod + def num_feature_outputs(self): + """Ther number of feature outputs returned by the feature extractor.""" + pass + + +def make_prediction_net(num_out_channels, kernel_size=3, num_filters=256, + bias_fill=None): + """Creates a network to predict the given number of output channels. + + This function is intended to make the prediction heads for the CenterNet + meta architecture. + + Args: + num_out_channels: Number of output channels. + kernel_size: The size of the conv kernel in the intermediate layer + num_filters: The number of filters in the intermediate conv layer. + bias_fill: If not None, is used to initialize the bias in the final conv + layer. + + Returns: + net: A keras module which when called on an input tensor of size + [batch_size, height, width, num_in_channels] returns an output + of size [batch_size, height, width, num_out_channels] + """ + + out_conv = tf.keras.layers.Conv2D(num_out_channels, kernel_size=1) + + if bias_fill is not None: + out_conv.bias_initializer = tf.keras.initializers.constant(bias_fill) + + net = tf.keras.Sequential( + [tf.keras.layers.Conv2D(num_filters, kernel_size=kernel_size, + padding='same'), + tf.keras.layers.ReLU(), + out_conv] + ) + + return net + + +def _to_float32(x): + return tf.cast(x, tf.float32) + + +def _get_shape(tensor, num_dims): + tf.Assert(tensor.get_shape().ndims == num_dims, [tensor]) + return shape_utils.combined_static_and_dynamic_shape(tensor) + + +def _flatten_spatial_dimensions(batch_images): + batch_size, height, width, channels = _get_shape(batch_images, 4) + return tf.reshape(batch_images, [batch_size, height * width, + channels]) + + +def top_k_feature_map_locations(feature_map, max_pool_kernel_size=3, k=100, + per_channel=False): + """Returns the top k scores and their locations in a feature map. + + Given a feature map, the top k values (based on activation) are returned. If + `per_channel` is True, the top k values **per channel** are returned. + + The `max_pool_kernel_size` argument allows for selecting local peaks in a + region. This filtering is done per channel, so nothing prevents two values at + the same location to be returned. + + Args: + feature_map: [batch, height, width, channels] float32 feature map. + max_pool_kernel_size: integer, the max pool kernel size to use to pull off + peak score locations in a neighborhood (independently for each channel). + For example, to make sure no two neighboring values (in the same channel) + are returned, set max_pool_kernel_size=3. If None or 1, will not apply max + pooling. + k: The number of highest scoring locations to return. + per_channel: If True, will return the top k scores and locations per + feature map channel. If False, the top k across the entire feature map + (height x width x channels) are returned. + + Returns: + Tuple of + scores: A [batch, N] float32 tensor with scores from the feature map in + descending order. 
If per_channel is False, N = k. Otherwise, + N = k * channels, and the first k elements correspond to channel 0, the + second k correspond to channel 1, etc. + y_indices: A [batch, N] int tensor with y indices of the top k feature map + locations. If per_channel is False, N = k. Otherwise, + N = k * channels. + x_indices: A [batch, N] int tensor with x indices of the top k feature map + locations. If per_channel is False, N = k. Otherwise, + N = k * channels. + channel_indices: A [batch, N] int tensor with channel indices of the top k + feature map locations. If per_channel is False, N = k. Otherwise, + N = k * channels. + """ + if not max_pool_kernel_size or max_pool_kernel_size == 1: + feature_map_peaks = feature_map + else: + feature_map_max_pool = tf.nn.max_pool( + feature_map, ksize=max_pool_kernel_size, strides=1, padding='SAME') + + feature_map_peak_mask = tf.math.abs( + feature_map - feature_map_max_pool) < PEAK_EPSILON + + # Zero out everything that is not a peak. + feature_map_peaks = ( + feature_map * _to_float32(feature_map_peak_mask)) + + batch_size, _, width, num_channels = _get_shape(feature_map, 4) + + if per_channel: + # Perform top k over batch and channels. + feature_map_peaks_transposed = tf.transpose(feature_map_peaks, + perm=[0, 3, 1, 2]) + feature_map_peaks_transposed = tf.reshape( + feature_map_peaks_transposed, [batch_size, num_channels, -1]) + scores, peak_flat_indices = tf.math.top_k(feature_map_peaks_transposed, k=k) + # Convert the indices such that they represent the location in the full + # (flattened) feature map of size [batch, height * width * channels]. + channel_idx = tf.range(num_channels)[tf.newaxis, :, tf.newaxis] + peak_flat_indices = num_channels * peak_flat_indices + channel_idx + scores = tf.reshape(scores, [batch_size, -1]) + peak_flat_indices = tf.reshape(peak_flat_indices, [batch_size, -1]) + else: + feature_map_peaks_flat = tf.reshape(feature_map_peaks, [batch_size, -1]) + scores, peak_flat_indices = tf.math.top_k(feature_map_peaks_flat, k=k) + + # Get x, y and channel indices corresponding to the top indices in the flat + # array. + y_indices, x_indices, channel_indices = ( + row_col_channel_indices_from_flattened_indices( + peak_flat_indices, width, num_channels)) + return scores, y_indices, x_indices, channel_indices + + +def prediction_tensors_to_boxes(detection_scores, y_indices, x_indices, + channel_indices, height_width_predictions, + offset_predictions): + """Converts CenterNet class-center, offset and size predictions to boxes. + + Args: + detection_scores: A [batch, num_boxes] float32 tensor with detection + scores in range [0, 1]. + y_indices: A [batch, num_boxes] int32 tensor with y indices corresponding to + object center locations (expressed in output coordinate frame). + x_indices: A [batch, num_boxes] int32 tensor with x indices corresponding to + object center locations (expressed in output coordinate frame). + channel_indices: A [batch, num_boxes] int32 tensor with channel indices + corresponding to object classes. + height_width_predictions: A float tensor of shape [batch_size, height, + width, 2] representing the height and width of a box centered at each + pixel. + offset_predictions: A float tensor of shape [batch_size, height, width, 2] + representing the y and x offsets of a box centered at each pixel. This + helps reduce the error from downsampling. + + Returns: + detection_boxes: A tensor of shape [batch_size, num_boxes, 4] holding the + the raw bounding box coordinates of boxes. 
+ detection_classes: An integer tensor of shape [batch_size, num_boxes] + indicating the predicted class for each box. + detection_scores: A float tensor of shape [batch_size, num_boxes] indicating + the score for each box. + num_detections: An integer tensor of shape [batch_size,] indicating the + number of boxes detected for each sample in the batch. + + """ + _, _, width, _ = _get_shape(height_width_predictions, 4) + + peak_spatial_indices = flattened_indices_from_row_col_indices( + y_indices, x_indices, width) + y_indices = _to_float32(y_indices) + x_indices = _to_float32(x_indices) + + height_width_flat = _flatten_spatial_dimensions(height_width_predictions) + offsets_flat = _flatten_spatial_dimensions(offset_predictions) + + height_width = tf.gather(height_width_flat, peak_spatial_indices, + batch_dims=1) + offsets = tf.gather(offsets_flat, peak_spatial_indices, batch_dims=1) + + heights, widths = tf.unstack(height_width, axis=2) + y_offsets, x_offsets = tf.unstack(offsets, axis=2) + + detection_classes = channel_indices + + num_detections = tf.reduce_sum(tf.to_int32(detection_scores > 0), axis=1) + + boxes = tf.stack([y_indices + y_offsets - heights / 2.0, + x_indices + x_offsets - widths / 2.0, + y_indices + y_offsets + heights / 2.0, + x_indices + x_offsets + widths / 2.0], axis=2) + + return boxes, detection_classes, detection_scores, num_detections + + +def prediction_tensors_to_keypoint_candidates( + keypoint_heatmap_predictions, + keypoint_heatmap_offsets, + keypoint_score_threshold=0.1, + max_pool_kernel_size=1, + max_candidates=20): + """Convert keypoint heatmap predictions and offsets to keypoint candidates. + + Args: + keypoint_heatmap_predictions: A float tensor of shape [batch_size, height, + width, num_keypoints] representing the per-keypoint heatmaps. + keypoint_heatmap_offsets: A float tensor of shape [batch_size, height, + width, 2] (or [batch_size, height, width, 2 * num_keypoints] if + 'per_keypoint_offset' is set True) representing the per-keypoint offsets. + keypoint_score_threshold: float, the threshold for considering a keypoint + a candidate. + max_pool_kernel_size: integer, the max pool kernel size to use to pull off + peak score locations in a neighborhood. For example, to make sure no two + neighboring values for the same keypoint are returned, set + max_pool_kernel_size=3. If None or 1, will not apply any local filtering. + max_candidates: integer, maximum number of keypoint candidates per + keypoint type. + + Returns: + keypoint_candidates: A tensor of shape + [batch_size, max_candidates, num_keypoints, 2] holding the + location of keypoint candidates in [y, x] format (expressed in absolute + coordinates in the output coordinate frame). + keypoint_scores: A float tensor of shape + [batch_size, max_candidates, num_keypoints] with the scores for each + keypoint candidate. The scores come directly from the heatmap predictions. + num_keypoint_candidates: An integer tensor of shape + [batch_size, num_keypoints] with the number of candidates for each + keypoint type, as it's possible to filter some candidates due to the score + threshold. + """ + batch_size, _, width, num_keypoints = _get_shape( + keypoint_heatmap_predictions, 4) + # Get x, y and channel indices corresponding to the top indices in the + # keypoint heatmap predictions. + # Note that the top k candidates are produced for **each keypoint type**. + # Might be worth eventually trying top k in the feature map, independent of + # the keypoint type. 
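+  # With `per_channel=True`, each of the four returned tensors has shape
+  # [batch_size, max_candidates * num_keypoints]; `channel_indices` records
+  # which keypoint type each candidate belongs to.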
+ keypoint_scores, y_indices, x_indices, channel_indices = ( + top_k_feature_map_locations(keypoint_heatmap_predictions, + max_pool_kernel_size=max_pool_kernel_size, + k=max_candidates, + per_channel=True)) + + peak_spatial_indices = flattened_indices_from_row_col_indices( + y_indices, x_indices, width) + y_indices = _to_float32(y_indices) + x_indices = _to_float32(x_indices) + + offsets_flat = _flatten_spatial_dimensions(keypoint_heatmap_offsets) + + selected_offsets = tf.gather(offsets_flat, peak_spatial_indices, batch_dims=1) + _, num_indices, num_channels = _get_shape(selected_offsets, 3) + if num_channels > 2: + reshaped_offsets = tf.reshape(selected_offsets, + [batch_size, num_indices, -1, 2]) + offsets = tf.gather(reshaped_offsets, channel_indices, batch_dims=2) + else: + offsets = selected_offsets + y_offsets, x_offsets = tf.unstack(offsets, axis=2) + + keypoint_candidates = tf.stack([y_indices + y_offsets, + x_indices + x_offsets], axis=2) + keypoint_candidates = tf.reshape( + keypoint_candidates, + [batch_size, num_keypoints, max_candidates, 2]) + keypoint_candidates = tf.transpose(keypoint_candidates, [0, 2, 1, 3]) + keypoint_scores = tf.reshape( + keypoint_scores, + [batch_size, num_keypoints, max_candidates]) + keypoint_scores = tf.transpose(keypoint_scores, [0, 2, 1]) + num_candidates = tf.reduce_sum( + tf.to_int32(keypoint_scores >= keypoint_score_threshold), axis=1) + + return keypoint_candidates, keypoint_scores, num_candidates + + +def regressed_keypoints_at_object_centers(regressed_keypoint_predictions, + y_indices, x_indices): + """Returns the regressed keypoints at specified object centers. + + The original keypoint predictions are regressed relative to each feature map + location. The returned keypoints are expressed in absolute coordinates in the + output frame (i.e. the center offsets are added to each individual regressed + set of keypoints). + + Args: + regressed_keypoint_predictions: A float tensor of shape + [batch_size, height, width, 2 * num_keypoints] holding regressed + keypoints. The last dimension has keypoint coordinates ordered as follows: + [y0, x0, y1, x1, ..., y{J-1}, x{J-1}] where J is the number of keypoints. + y_indices: A [batch, num_instances] int tensor holding y indices for object + centers. These indices correspond to locations in the output feature map. + x_indices: A [batch, num_instances] int tensor holding x indices for object + centers. These indices correspond to locations in the output feature map. + + Returns: + A float tensor of shape [batch_size, num_objects, 2 * num_keypoints] where + regressed keypoints are gathered at the provided locations, and converted + to absolute coordinates in the output coordinate frame. 
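The reshape/transpose at the end of `prediction_tensors_to_keypoint_candidates` relies on the per-channel top-k layout documented earlier (first `k` entries for channel 0, the next `k` for channel 1, and so on). A small NumPy sketch of that rearrangement, with invented values:

```python
# Illustrative only: rearranging [batch, num_keypoints * max_candidates]
# per-channel top-k scores into [batch, max_candidates, num_keypoints].
import numpy as np

batch, num_keypoints, max_candidates = 1, 3, 2
scores_flat = np.array([[0.9, 0.8,    # keypoint 0 candidates
                         0.7, 0.6,    # keypoint 1 candidates
                         0.5, 0.4]])  # keypoint 2 candidates

scores = scores_flat.reshape(batch, num_keypoints, max_candidates)
scores = np.transpose(scores, (0, 2, 1))  # [batch, max_candidates, num_keypoints]
assert scores[0, 0].tolist() == [0.9, 0.7, 0.5]  # best candidate per keypoint
```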
+ """ + batch_size, _, width, _ = _get_shape(regressed_keypoint_predictions, 4) + flattened_indices = flattened_indices_from_row_col_indices( + y_indices, x_indices, width) + _, num_instances = _get_shape(flattened_indices, 2) + + regressed_keypoints_flat = _flatten_spatial_dimensions( + regressed_keypoint_predictions) + + relative_regressed_keypoints = tf.gather( + regressed_keypoints_flat, flattened_indices, batch_dims=1) + relative_regressed_keypoints = tf.reshape( + relative_regressed_keypoints, + [batch_size, num_instances, -1, 2]) + relative_regressed_keypoints_y, relative_regressed_keypoints_x = tf.unstack( + relative_regressed_keypoints, axis=3) + y_indices = _to_float32(tf.expand_dims(y_indices, axis=-1)) + x_indices = _to_float32(tf.expand_dims(x_indices, axis=-1)) + absolute_regressed_keypoints = tf.stack( + [y_indices + relative_regressed_keypoints_y, + x_indices + relative_regressed_keypoints_x], + axis=3) + return tf.reshape(absolute_regressed_keypoints, + [batch_size, num_instances, -1]) + + +def refine_keypoints(regressed_keypoints, keypoint_candidates, keypoint_scores, + num_keypoint_candidates, bboxes=None, + unmatched_keypoint_score=0.1, box_scale=1.2, + candidate_search_scale=0.3, + candidate_ranking_mode='min_distance'): + """Refines regressed keypoints by snapping to the nearest candidate keypoints. + + The initial regressed keypoints represent a full set of keypoints regressed + from the centers of the objects. The keypoint candidates are estimated + independently from heatmaps, and are not associated with any object instances. + This function refines the regressed keypoints by "snapping" to the + nearest/highest score/highest score-distance ratio (depending on the + candidate_ranking_mode) candidate of the same keypoint type (e.g. "nose"). + If no candidates are nearby, the regressed keypoint remains unchanged. + + In order to snap a regressed keypoint to a candidate keypoint, the following + must be satisfied: + - the candidate keypoint must be of the same type as the regressed keypoint + - the candidate keypoint must not lie outside the predicted boxes (or the + boxes which encloses the regressed keypoints for the instance if `bboxes` is + not provided). Note that the box is scaled by + `regressed_box_scale` in height and width, to provide some margin around the + keypoints + - the distance to the closest candidate keypoint cannot exceed + candidate_search_scale * max(height, width), where height and width refer to + the bounding box for the instance. + + Note that the same candidate keypoint is allowed to snap to regressed + keypoints in difference instances. + + Args: + regressed_keypoints: A float tensor of shape + [batch_size, num_instances, num_keypoints, 2] with the initial regressed + keypoints. + keypoint_candidates: A tensor of shape + [batch_size, max_candidates, num_keypoints, 2] holding the location of + keypoint candidates in [y, x] format (expressed in absolute coordinates in + the output coordinate frame). + keypoint_scores: A float tensor of shape + [batch_size, max_candidates, num_keypoints] indicating the scores for + keypoint candidates. + num_keypoint_candidates: An integer tensor of shape + [batch_size, num_keypoints] indicating the number of valid candidates for + each keypoint type, as there may be padding (dim 1) of + `keypoint_candidates` and `keypoint_scores`. + bboxes: A tensor of shape [batch_size, num_instances, 4] with predicted + bounding boxes for each instance, expressed in the output coordinate + frame. 
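Conceptually, `regressed_keypoints_at_object_centers` just adds the object-center coordinates to the keypoint offsets regressed at that center. A toy sketch with invented numbers:

```python
# Illustrative only: relative-to-absolute conversion for a single object
# center. Regressed keypoints are offsets ordered [y0, x0, y1, x1, ...].
center_y, center_x = 12.0, 30.0
relative = [(-2.0, 1.5), (3.0, -0.5)]

absolute = [(center_y + dy, center_x + dx) for dy, dx in relative]
print(absolute)  # [(10.0, 31.5), (15.0, 29.5)]
```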
If not provided, boxes will be computed from regressed keypoints. + unmatched_keypoint_score: float, the default score to use for regressed + keypoints that are not successfully snapped to a nearby candidate. + box_scale: float, the multiplier to expand the bounding boxes (either the + provided boxes or those which tightly cover the regressed keypoints) for + an instance. This scale is typically larger than 1.0 when not providing + `bboxes`. + candidate_search_scale: float, the scale parameter that multiplies the + largest dimension of a bounding box. The resulting distance becomes a + search radius for candidates in the vicinity of each regressed keypoint. + candidate_ranking_mode: A string as one of ['min_distance', + 'score_distance_ratio'] indicating how to select the candidate. If invalid + value is provided, an ValueError will be raised. + + Returns: + A tuple with: + refined_keypoints: A float tensor of shape + [batch_size, num_instances, num_keypoints, 2] with the final, refined + keypoints. + refined_scores: A float tensor of shape + [batch_size, num_instances, num_keypoints] with scores associated with all + instances and keypoints in `refined_keypoints`. + + Raises: + ValueError: if provided candidate_ranking_mode is not one of + ['min_distance', 'score_distance_ratio'] + """ + batch_size, num_instances, num_keypoints, _ = ( + shape_utils.combined_static_and_dynamic_shape(regressed_keypoints)) + max_candidates = keypoint_candidates.shape[1] + + # Replace all invalid (i.e. padded) keypoint candidates with NaN. + # This will prevent them from being considered. + range_tiled = tf.tile( + tf.reshape(tf.range(max_candidates), [1, max_candidates, 1]), + [batch_size, 1, num_keypoints]) + num_candidates_tiled = tf.tile(tf.expand_dims(num_keypoint_candidates, 1), + [1, max_candidates, 1]) + invalid_candidates = range_tiled >= num_candidates_tiled + nan_mask = tf.where( + invalid_candidates, + np.nan * tf.ones_like(invalid_candidates, dtype=tf.float32), + tf.ones_like(invalid_candidates, dtype=tf.float32)) + keypoint_candidates_with_nans = tf.math.multiply( + keypoint_candidates, tf.expand_dims(nan_mask, -1)) + + # Pairwise squared distances between regressed keypoints and candidate + # keypoints (for a single keypoint type). + # Shape [batch_size, num_instances, max_candidates, num_keypoints]. + regressed_keypoint_expanded = tf.expand_dims(regressed_keypoints, + axis=2) + keypoint_candidates_expanded = tf.expand_dims( + keypoint_candidates_with_nans, axis=1) + sqrd_distances = tf.math.reduce_sum( + tf.math.squared_difference(regressed_keypoint_expanded, + keypoint_candidates_expanded), + axis=-1) + distances = tf.math.sqrt(sqrd_distances) + + # Determine the candidates that have the minimum distance to the regressed + # keypoints. Shape [batch_size, num_instances, num_keypoints]. + min_distances = tf.math.reduce_min(distances, axis=2) + if candidate_ranking_mode == 'min_distance': + nearby_candidate_inds = tf.math.argmin(distances, axis=2) + elif candidate_ranking_mode == 'score_distance_ratio': + # tiled_keypoint_scores: + # Shape [batch_size, num_instances, max_candidates, num_keypoints]. 
+ tiled_keypoint_scores = tf.tile( + tf.expand_dims(keypoint_scores, axis=1), + multiples=[1, num_instances, 1, 1]) + ranking_scores = tiled_keypoint_scores / (distances + 1e-6) + nearby_candidate_inds = tf.math.argmax(ranking_scores, axis=2) + else: + raise ValueError('Not recognized candidate_ranking_mode: %s' % + candidate_ranking_mode) + + # Gather the coordinates and scores corresponding to the closest candidates. + # Shape of tensors are [batch_size, num_instances, num_keypoints, 2] and + # [batch_size, num_instances, num_keypoints], respectively. + nearby_candidate_coords, nearby_candidate_scores = ( + _gather_candidates_at_indices(keypoint_candidates, keypoint_scores, + nearby_candidate_inds)) + + if bboxes is None: + # Create bboxes from regressed keypoints. + # Shape [batch_size * num_instances, 4]. + regressed_keypoints_flattened = tf.reshape( + regressed_keypoints, [-1, num_keypoints, 2]) + bboxes_flattened = keypoint_ops.keypoints_to_enclosing_bounding_boxes( + regressed_keypoints_flattened) + else: + bboxes_flattened = tf.reshape(bboxes, [-1, 4]) + + # Scale the bounding boxes. + # Shape [batch_size, num_instances, 4]. + boxlist = box_list.BoxList(bboxes_flattened) + boxlist_scaled = box_list_ops.scale_height_width( + boxlist, box_scale, box_scale) + bboxes_scaled = boxlist_scaled.get() + bboxes = tf.reshape(bboxes_scaled, [batch_size, num_instances, 4]) + + # Get ymin, xmin, ymax, xmax bounding box coordinates, tiled per keypoint. + # Shape [batch_size, num_instances, num_keypoints]. + bboxes_tiled = tf.tile(tf.expand_dims(bboxes, 2), [1, 1, num_keypoints, 1]) + ymin, xmin, ymax, xmax = tf.unstack(bboxes_tiled, axis=3) + + # Produce a mask that indicates whether the original regressed keypoint + # should be used instead of a candidate keypoint. + # Shape [batch_size, num_instances, num_keypoints]. + search_radius = ( + tf.math.maximum(ymax - ymin, xmax - xmin) * candidate_search_scale) + mask = (tf.cast(nearby_candidate_coords[:, :, :, 0] < ymin, tf.int32) + + tf.cast(nearby_candidate_coords[:, :, :, 0] > ymax, tf.int32) + + tf.cast(nearby_candidate_coords[:, :, :, 1] < xmin, tf.int32) + + tf.cast(nearby_candidate_coords[:, :, :, 1] > xmax, tf.int32) + + # Filter out the chosen candidate with score lower than unmatched + # keypoint score. + tf.cast(nearby_candidate_scores < + unmatched_keypoint_score, tf.int32) + + tf.cast(min_distances > search_radius, tf.int32)) + mask = mask > 0 + + # Create refined keypoints where candidate keypoints replace original + # regressed keypoints if they are in the vicinity of the regressed keypoints. + # Shape [batch_size, num_instances, num_keypoints, 2]. + refined_keypoints = tf.where( + tf.tile(tf.expand_dims(mask, -1), [1, 1, 1, 2]), + regressed_keypoints, + nearby_candidate_coords) + + # Update keypoints scores. In the case where we use the original regressed + # keypoints, we use a default score of `unmatched_keypoint_score`. + # Shape [batch_size, num_instances, num_keypoints]. + refined_scores = tf.where( + mask, + unmatched_keypoint_score * tf.ones_like(nearby_candidate_scores), + nearby_candidate_scores) + + return refined_keypoints, refined_scores + + +def _pad_to_full_keypoint_dim(keypoint_coords, keypoint_scores, keypoint_inds, + num_total_keypoints): + """Scatter keypoint elements into tensors with full keypoints dimension. + + Args: + keypoint_coords: a [batch_size, num_instances, num_keypoints, 2] float32 + tensor. + keypoint_scores: a [batch_size, num_instances, num_keypoints] float32 + tensor. 
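The snapping rule in `refine_keypoints` is easiest to read for a single keypoint. The sketch below mirrors the mask conditions above under the 'min_distance' ranking mode; every value is invented for illustration:

```python
# Illustrative only: decide whether one regressed keypoint snaps to its
# nearest candidate or falls back to the regressed location.
unmatched_keypoint_score = 0.1
candidate_search_scale = 0.3

regressed = (40.0, 60.0)
candidate = (42.0, 61.0)
candidate_score = 0.55
ymin, xmin, ymax, xmax = 30.0, 50.0, 70.0, 90.0  # (scaled) instance box

distance = ((regressed[0] - candidate[0]) ** 2 +
            (regressed[1] - candidate[1]) ** 2) ** 0.5
search_radius = max(ymax - ymin, xmax - xmin) * candidate_search_scale

keep_regressed = (
    candidate[0] < ymin or candidate[0] > ymax or
    candidate[1] < xmin or candidate[1] > xmax or
    candidate_score < unmatched_keypoint_score or
    distance > search_radius)

if keep_regressed:
  refined, score = regressed, unmatched_keypoint_score
else:
  refined, score = candidate, candidate_score
print(refined, score)  # (42.0, 61.0) 0.55
```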
+ keypoint_inds: a list of integers that indicate the keypoint indices for + this specific keypoint class. These indices are used to scatter into + tensors that have a `num_total_keypoints` dimension. + num_total_keypoints: The total number of keypoints that this model predicts. + + Returns: + A tuple with + keypoint_coords_padded: a + [batch_size, num_instances, num_total_keypoints,2] float32 tensor. + keypoint_scores_padded: a [batch_size, num_instances, num_total_keypoints] + float32 tensor. + """ + batch_size, num_instances, _, _ = ( + shape_utils.combined_static_and_dynamic_shape(keypoint_coords)) + kpt_coords_transposed = tf.transpose(keypoint_coords, [2, 0, 1, 3]) + kpt_scores_transposed = tf.transpose(keypoint_scores, [2, 0, 1]) + kpt_inds_tensor = tf.expand_dims(keypoint_inds, axis=-1) + kpt_coords_scattered = tf.scatter_nd( + indices=kpt_inds_tensor, + updates=kpt_coords_transposed, + shape=[num_total_keypoints, batch_size, num_instances, 2]) + kpt_scores_scattered = tf.scatter_nd( + indices=kpt_inds_tensor, + updates=kpt_scores_transposed, + shape=[num_total_keypoints, batch_size, num_instances]) + keypoint_coords_padded = tf.transpose(kpt_coords_scattered, [1, 2, 0, 3]) + keypoint_scores_padded = tf.transpose(kpt_scores_scattered, [1, 2, 0]) + return keypoint_coords_padded, keypoint_scores_padded + + +def _pad_to_full_instance_dim(keypoint_coords, keypoint_scores, instance_inds, + max_instances): + """Scatter keypoint elements into tensors with full instance dimension. + + Args: + keypoint_coords: a [batch_size, num_instances, num_keypoints, 2] float32 + tensor. + keypoint_scores: a [batch_size, num_instances, num_keypoints] float32 + tensor. + instance_inds: a list of integers that indicate the instance indices for + these keypoints. These indices are used to scatter into tensors + that have a `max_instances` dimension. + max_instances: The maximum number of instances detected by the model. + + Returns: + A tuple with + keypoint_coords_padded: a [batch_size, max_instances, num_keypoints, 2] + float32 tensor. + keypoint_scores_padded: a [batch_size, max_instances, num_keypoints] + float32 tensor. + """ + batch_size, _, num_keypoints, _ = ( + shape_utils.combined_static_and_dynamic_shape(keypoint_coords)) + kpt_coords_transposed = tf.transpose(keypoint_coords, [1, 0, 2, 3]) + kpt_scores_transposed = tf.transpose(keypoint_scores, [1, 0, 2]) + instance_inds = tf.expand_dims(instance_inds, axis=-1) + kpt_coords_scattered = tf.scatter_nd( + indices=instance_inds, + updates=kpt_coords_transposed, + shape=[max_instances, batch_size, num_keypoints, 2]) + kpt_scores_scattered = tf.scatter_nd( + indices=instance_inds, + updates=kpt_scores_transposed, + shape=[max_instances, batch_size, num_keypoints]) + keypoint_coords_padded = tf.transpose(kpt_coords_scattered, [1, 0, 2, 3]) + keypoint_scores_padded = tf.transpose(kpt_scores_scattered, [1, 0, 2]) + return keypoint_coords_padded, keypoint_scores_padded + + +def _gather_candidates_at_indices(keypoint_candidates, keypoint_scores, + indices): + """Gathers keypoint candidate coordinates and scores at indices. + + Args: + keypoint_candidates: a float tensor of shape [batch_size, max_candidates, + num_keypoints, 2] with candidate coordinates. + keypoint_scores: a float tensor of shape [batch_size, max_candidates, + num_keypoints] with keypoint scores. + indices: an integer tensor of shape [batch_size, num_indices, num_keypoints] + with indices. 
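The two padding helpers above are scatter operations along the keypoint (or instance) axis. A minimal NumPy sketch of the keypoint-dimension case, reduced to one batch item and one instance (values invented):

```python
# Illustrative only: scatter a task's keypoint subset back into a tensor with
# the model's full keypoint dimension, as _pad_to_full_keypoint_dim() does.
import numpy as np

num_total_keypoints = 5
keypoint_inds = [1, 3]                  # keypoint types this task predicts
scores_for_task = np.array([0.8, 0.6])  # one score per predicted type

scores_padded = np.zeros(num_total_keypoints, dtype=np.float32)
scores_padded[keypoint_inds] = scores_for_task
print(scores_padded)  # [0.  0.8 0.  0.6 0. ]
```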
+ + Returns: + A tuple with + gathered_keypoint_candidates: a float tensor of shape [batch_size, + num_indices, num_keypoints, 2] with gathered coordinates. + gathered_keypoint_scores: a float tensor of shape [batch_size, + num_indices, num_keypoints, 2]. + """ + # Transpose tensors so that all batch dimensions are up front. + keypoint_candidates_transposed = tf.transpose(keypoint_candidates, + [0, 2, 1, 3]) + keypoint_scores_transposed = tf.transpose(keypoint_scores, [0, 2, 1]) + nearby_candidate_inds_transposed = tf.transpose(indices, + [0, 2, 1]) + nearby_candidate_coords_tranposed = tf.gather( + keypoint_candidates_transposed, nearby_candidate_inds_transposed, + batch_dims=2) + nearby_candidate_scores_transposed = tf.gather( + keypoint_scores_transposed, nearby_candidate_inds_transposed, + batch_dims=2) + gathered_keypoint_candidates = tf.transpose(nearby_candidate_coords_tranposed, + [0, 2, 1, 3]) + gathered_keypoint_scores = tf.transpose(nearby_candidate_scores_transposed, + [0, 2, 1]) + return gathered_keypoint_candidates, gathered_keypoint_scores + + +def flattened_indices_from_row_col_indices(row_indices, col_indices, num_cols): + """Get the index in a flattened array given row and column indices.""" + return (row_indices * num_cols) + col_indices + + +def row_col_channel_indices_from_flattened_indices(indices, num_cols, + num_channels): + """Computes row, column and channel indices from flattened indices. + + Args: + indices: An integer tensor of any shape holding the indices in the flattened + space. + num_cols: Number of columns in the image (width). + num_channels: Number of channels in the image. + + Returns: + row_indices: The row indices corresponding to each of the input indices. + Same shape as indices. + col_indices: The column indices corresponding to each of the input indices. + Same shape as indices. + channel_indices. The channel indices corresponding to each of the input + indices. + + """ + row_indices = (indices // num_channels) // num_cols + col_indices = (indices // num_channels) % num_cols + channel_indices = indices % num_channels + + return row_indices, col_indices, channel_indices + + +def get_valid_anchor_weights_in_flattened_image(true_image_shapes, height, + width): + """Computes valid anchor weights for an image assuming pixels will be flattened. + + This function is useful when we only want to penalize valid areas in the + image in the case when padding is used. The function assumes that the loss + function will be applied after flattening the spatial dimensions and returns + anchor weights accordingly. + + Args: + true_image_shapes: An integer tensor of shape [batch_size, 3] representing + the true image shape (without padding) for each sample in the batch. + height: height of the prediction from the network. + width: width of the prediction from the network. + + Returns: + valid_anchor_weights: a float tensor of shape [batch_size, height * width] + with 1s in locations where the spatial coordinates fall within the height + and width in true_image_shapes. 
+ """ + + indices = tf.reshape(tf.range(height * width), [1, -1]) + batch_size = tf.shape(true_image_shapes)[0] + batch_indices = tf.ones((batch_size, 1), dtype=tf.int32) * indices + + y_coords, x_coords, _ = row_col_channel_indices_from_flattened_indices( + batch_indices, width, 1) + + max_y, max_x = true_image_shapes[:, 0], true_image_shapes[:, 1] + max_x = _to_float32(tf.expand_dims(max_x, 1)) + max_y = _to_float32(tf.expand_dims(max_y, 1)) + + x_coords = _to_float32(x_coords) + y_coords = _to_float32(y_coords) + + valid_mask = tf.math.logical_and(x_coords < max_x, y_coords < max_y) + + return _to_float32(valid_mask) + + +def convert_strided_predictions_to_normalized_boxes(boxes, stride, + true_image_shapes): + """Converts predictions in the output space to normalized boxes. + + Boxes falling outside the valid image boundary are clipped to be on the + boundary. + + Args: + boxes: A tensor of shape [batch_size, num_boxes, 4] holding the raw + coordinates of boxes in the model's output space. + stride: The stride in the output space. + true_image_shapes: A tensor of shape [batch_size, 3] representing the true + shape of the input not considering padding. + + Returns: + boxes: A tensor of shape [batch_size, num_boxes, 4] representing the + coordinates of the normalized boxes. + """ + + def _normalize_boxlist(args): + + boxes, height, width = args + boxes = box_list_ops.scale(boxes, stride, stride) + boxes = box_list_ops.to_normalized_coordinates(boxes, height, width) + boxes = box_list_ops.clip_to_window(boxes, [0., 0., 1., 1.], + filter_nonoverlapping=False) + return boxes + + box_lists = [box_list.BoxList(boxes) for boxes in tf.unstack(boxes, axis=0)] + true_heights, true_widths, _ = tf.unstack(true_image_shapes, axis=1) + + true_heights_list = tf.unstack(true_heights, axis=0) + true_widths_list = tf.unstack(true_widths, axis=0) + + box_lists = list(map(_normalize_boxlist, + zip(box_lists, true_heights_list, true_widths_list))) + boxes = tf.stack([box_list_instance.get() for + box_list_instance in box_lists], axis=0) + + return boxes + + +def convert_strided_predictions_to_normalized_keypoints( + keypoint_coords, keypoint_scores, stride, true_image_shapes, + clip_out_of_frame_keypoints=False): + """Converts predictions in the output space to normalized keypoints. + + If clip_out_of_frame_keypoints=False, keypoint coordinates falling outside + the valid image boundary are normalized but not clipped; If + clip_out_of_frame_keypoints=True, keypoint coordinates falling outside the + valid image boundary are clipped to the closest image boundary and the scores + will be set to 0.0. + + Args: + keypoint_coords: A tensor of shape + [batch_size, num_instances, num_keypoints, 2] holding the raw coordinates + of keypoints in the model's output space. + keypoint_scores: A tensor of shape + [batch_size, num_instances, num_keypoints] holding the keypoint scores. + stride: The stride in the output space. + true_image_shapes: A tensor of shape [batch_size, 3] representing the true + shape of the input not considering padding. + clip_out_of_frame_keypoints: A boolean indicating whether keypoints outside + the image boundary should be clipped. If True, keypoint coords will be + clipped to image boundary. If False, keypoints are normalized but not + filtered based on their location. + + Returns: + keypoint_coords_normalized: A tensor of shape + [batch_size, num_instances, num_keypoints, 2] representing the coordinates + of the normalized keypoints. 
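A toy version of the validity weights computed by `get_valid_anchor_weights_in_flattened_image`, assuming for illustration that the prediction grid and the true image shape are expressed in the same frame:

```python
# Illustrative only: per-pixel weights that are 1 inside the unpadded region
# of a padded image and 0 elsewhere, in flattened-then-reshaped form.
import numpy as np

height, width = 3, 4             # prediction grid
true_height, true_width = 2, 3   # valid (unpadded) region, same frame

indices = np.arange(height * width)
y_coords = indices // width
x_coords = indices % width
valid = ((y_coords < true_height) & (x_coords < true_width)).astype(np.float32)
print(valid.reshape(height, width))
# [[1. 1. 1. 0.]
#  [1. 1. 1. 0.]
#  [0. 0. 0. 0.]]
```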
+ keypoint_scores: A tensor of shape + [batch_size, num_instances, num_keypoints] representing the updated + keypoint scores. + """ + # Flatten keypoints and scores. + batch_size, _, _, _ = ( + shape_utils.combined_static_and_dynamic_shape(keypoint_coords)) + + # Scale and normalize keypoints. + true_heights, true_widths, _ = tf.unstack(true_image_shapes, axis=1) + yscale = float(stride) / tf.cast(true_heights, tf.float32) + xscale = float(stride) / tf.cast(true_widths, tf.float32) + yx_scale = tf.stack([yscale, xscale], axis=1) + keypoint_coords_normalized = keypoint_coords * tf.reshape( + yx_scale, [batch_size, 1, 1, 2]) + + if clip_out_of_frame_keypoints: + # Determine the keypoints that are in the true image regions. + valid_indices = tf.logical_and( + tf.logical_and(keypoint_coords_normalized[:, :, :, 0] >= 0.0, + keypoint_coords_normalized[:, :, :, 0] <= 1.0), + tf.logical_and(keypoint_coords_normalized[:, :, :, 1] >= 0.0, + keypoint_coords_normalized[:, :, :, 1] <= 1.0)) + batch_window = tf.tile( + tf.constant([[0.0, 0.0, 1.0, 1.0]], dtype=tf.float32), + multiples=[batch_size, 1]) + def clip_to_window(inputs): + keypoints, window = inputs + return keypoint_ops.clip_to_window(keypoints, window) + keypoint_coords_normalized = tf.map_fn( + clip_to_window, (keypoint_coords_normalized, batch_window), + dtype=tf.float32, back_prop=False) + keypoint_scores = tf.where(valid_indices, keypoint_scores, + tf.zeros_like(keypoint_scores)) + return keypoint_coords_normalized, keypoint_scores + + +def convert_strided_predictions_to_instance_masks( + boxes, classes, masks, stride, mask_height, mask_width, + true_image_shapes, score_threshold=0.5): + """Converts predicted full-image masks into instance masks. + + For each predicted detection box: + * Crop and resize the predicted mask based on the detected bounding box + coordinates and class prediction. Uses bilinear resampling. + * Binarize the mask using the provided score threshold. + + Args: + boxes: A tensor of shape [batch, max_detections, 4] holding the predicted + boxes, in normalized coordinates (relative to the true image dimensions). + classes: An integer tensor of shape [batch, max_detections] containing the + detected class for each box (0-indexed). + masks: A [batch, output_height, output_width, num_classes] float32 + tensor with class probabilities. + stride: The stride in the output space. + mask_height: The desired resized height for instance masks. + mask_width: The desired resized width for instance masks. + true_image_shapes: A tensor of shape [batch, 3] representing the true + shape of the inputs not considering padding. + score_threshold: The threshold at which to convert predicted mask + into foreground pixels. + + Returns: + A [batch_size, max_detections, mask_height, mask_width] uint8 tensor with + predicted foreground mask for each instance. The masks take values in + {0, 1}. + """ + _, output_height, output_width, _ = ( + shape_utils.combined_static_and_dynamic_shape(masks)) + input_height = stride * output_height + input_width = stride * output_width + + # Boxes are in normalized coordinates relative to true image shapes. Convert + # coordinates to be normalized relative to input image shapes (since masks + # may still have padding). + # Then crop and resize each mask. 
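The coordinate conversion in `convert_strided_predictions_to_normalized_keypoints` (and the analogous box version) reduces to multiplying output-frame coordinates by `stride / true_dimension`. A one-keypoint sketch with invented values:

```python
# Illustrative only: map a coordinate from the stride-reduced output frame to
# normalized image coordinates.
stride = 4
true_height, true_width = 512, 640  # unpadded input size (invented)

y_out, x_out = 100.0, 150.0         # keypoint location in the output frame
y_norm = y_out * stride / true_height
x_norm = x_out * stride / true_width
print(y_norm, x_norm)  # 0.78125 0.9375
```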
+ def crop_and_threshold_masks(args): + """Crops masks based on detection boxes.""" + boxes, classes, masks, true_height, true_width = args + boxlist = box_list.BoxList(boxes) + y_scale = true_height / input_height + x_scale = true_width / input_width + boxlist = box_list_ops.scale(boxlist, y_scale, x_scale) + boxes = boxlist.get() + # Convert masks from [input_height, input_width, num_classes] to + # [num_classes, input_height, input_width, 1]. + masks_4d = tf.transpose(masks, perm=[2, 0, 1])[:, :, :, tf.newaxis] + cropped_masks = tf2.image.crop_and_resize( + masks_4d, + boxes=boxes, + box_indices=classes, + crop_size=[mask_height, mask_width], + method='bilinear') + masks_3d = tf.squeeze(cropped_masks, axis=3) + masks_binarized = tf.math.greater_equal(masks_3d, score_threshold) + return tf.cast(masks_binarized, tf.uint8) + + true_heights, true_widths, _ = tf.unstack(true_image_shapes, axis=1) + masks_for_image = shape_utils.static_or_dynamic_map_fn( + crop_and_threshold_masks, + elems=[boxes, classes, masks, true_heights, true_widths], + dtype=tf.uint8, + back_prop=False) + masks = tf.stack(masks_for_image, axis=0) + return masks + + +class ObjectDetectionParams( + collections.namedtuple('ObjectDetectionParams', [ + 'localization_loss', 'scale_loss_weight', 'offset_loss_weight', + 'task_loss_weight' + ])): + """Namedtuple to host object detection related parameters. + + This is a wrapper class over the fields that are either the hyper-parameters + or the loss functions needed for the object detection task. The class is + immutable after constructed. Please see the __new__ function for detailed + information for each fields. + """ + + __slots__ = () + + def __new__(cls, + localization_loss, + scale_loss_weight, + offset_loss_weight, + task_loss_weight=1.0): + """Constructor with default values for ObjectDetectionParams. + + Args: + localization_loss: a object_detection.core.losses.Loss object to compute + the loss for the center offset and height/width predictions in + CenterNet. + scale_loss_weight: float, The weight for localizing box size. Note that + the scale loss is dependent on the input image size, since we penalize + the raw height and width. This constant may need to be adjusted + depending on the input size. + offset_loss_weight: float, The weight for localizing center offsets. + task_loss_weight: float, the weight of the object detection loss. + + Returns: + An initialized ObjectDetectionParams namedtuple. + """ + return super(ObjectDetectionParams, + cls).__new__(cls, localization_loss, scale_loss_weight, + offset_loss_weight, task_loss_weight) + + +class KeypointEstimationParams( + collections.namedtuple('KeypointEstimationParams', [ + 'task_name', 'class_id', 'keypoint_indices', 'classification_loss', + 'localization_loss', 'keypoint_labels', 'keypoint_std_dev', + 'keypoint_heatmap_loss_weight', 'keypoint_offset_loss_weight', + 'keypoint_regression_loss_weight', 'keypoint_candidate_score_threshold', + 'heatmap_bias_init', 'num_candidates_per_keypoint', 'task_loss_weight', + 'peak_max_pool_kernel_size', 'unmatched_keypoint_score', 'box_scale', + 'candidate_search_scale', 'candidate_ranking_mode', + 'offset_peak_radius', 'per_keypoint_offset' + ])): + """Namedtuple to host object detection related parameters. + + This is a wrapper class over the fields that are either the hyper-parameters + or the loss functions needed for the keypoint estimation task. The class is + immutable after constructed. Please see the __new__ function for detailed + information for each fields. 
+ """ + + __slots__ = () + + def __new__(cls, + task_name, + class_id, + keypoint_indices, + classification_loss, + localization_loss, + keypoint_labels=None, + keypoint_std_dev=None, + keypoint_heatmap_loss_weight=1.0, + keypoint_offset_loss_weight=1.0, + keypoint_regression_loss_weight=1.0, + keypoint_candidate_score_threshold=0.1, + heatmap_bias_init=-2.19, + num_candidates_per_keypoint=100, + task_loss_weight=1.0, + peak_max_pool_kernel_size=3, + unmatched_keypoint_score=0.1, + box_scale=1.2, + candidate_search_scale=0.3, + candidate_ranking_mode='min_distance', + offset_peak_radius=0, + per_keypoint_offset=False): + """Constructor with default values for KeypointEstimationParams. + + Args: + task_name: string, the name of the task this namedtuple corresponds to. + Note that it should be an unique identifier of the task. + class_id: int, the ID of the class that contains the target keypoints to + considered in this task. For example, if the task is human pose + estimation, the class id should correspond to the "human" class. Note + that the ID is 0-based, meaning that class 0 corresponds to the first + non-background object class. + keypoint_indices: A list of integers representing the indicies of the + keypoints to be considered in this task. This is used to retrieve the + subset of the keypoints from gt_keypoints that should be considered in + this task. + classification_loss: an object_detection.core.losses.Loss object to + compute the loss for the class predictions in CenterNet. + localization_loss: an object_detection.core.losses.Loss object to compute + the loss for the center offset and height/width predictions in + CenterNet. + keypoint_labels: A list of strings representing the label text of each + keypoint, e.g. "nose", 'left_shoulder". Note that the length of this + list should be equal to keypoint_indices. + keypoint_std_dev: A list of float represent the standard deviation of the + Gaussian kernel used to generate the keypoint heatmap. It is to provide + the flexibility of using different sizes of Gaussian kernel for each + keypoint class. + keypoint_heatmap_loss_weight: float, The weight for the keypoint heatmap. + keypoint_offset_loss_weight: float, The weight for the keypoint offsets + loss. + keypoint_regression_loss_weight: float, The weight for keypoint regression + loss. Note that the loss is dependent on the input image size, since we + penalize the raw height and width. This constant may need to be adjusted + depending on the input size. + keypoint_candidate_score_threshold: float, The heatmap score threshold for + a keypoint to become a valid candidate. + heatmap_bias_init: float, the initial value of bias in the convolutional + kernel of the class prediction head. If set to None, the bias is + initialized with zeros. + num_candidates_per_keypoint: The maximum number of candidates to retrieve + for each keypoint. + task_loss_weight: float, the weight of the keypoint estimation loss. + peak_max_pool_kernel_size: Max pool kernel size to use to pull off peak + score locations in a neighborhood (independently for each keypoint + types). + unmatched_keypoint_score: The default score to use for regressed keypoints + that are not successfully snapped to a nearby candidate. + box_scale: The multiplier to expand the bounding boxes (either the + provided boxes or those which tightly cover the regressed keypoints). + candidate_search_scale: The scale parameter that multiplies the largest + dimension of a bounding box. 
The resulting distance becomes a search + radius for candidates in the vicinity of each regressed keypoint. + candidate_ranking_mode: One of ['min_distance', 'score_distance_ratio'] + indicating how to select the keypoint candidate. + offset_peak_radius: The radius (in the unit of output pixel) around + groundtruth heatmap peak to assign the offset targets. If set 0, then + the offset target will only be assigned to the heatmap peak (same + behavior as the original paper). + per_keypoint_offset: A bool indicates whether to assign offsets for each + keypoint channel separately. If set False, the output offset target has + the shape [batch_size, out_height, out_width, 2] (same behavior as the + original paper). If set True, the output offset target has the shape + [batch_size, out_height, out_width, 2 * num_keypoints] (recommended when + the offset_peak_radius is not zero). + + Returns: + An initialized KeypointEstimationParams namedtuple. + """ + return super(KeypointEstimationParams, cls).__new__( + cls, task_name, class_id, keypoint_indices, classification_loss, + localization_loss, keypoint_labels, keypoint_std_dev, + keypoint_heatmap_loss_weight, keypoint_offset_loss_weight, + keypoint_regression_loss_weight, keypoint_candidate_score_threshold, + heatmap_bias_init, num_candidates_per_keypoint, task_loss_weight, + peak_max_pool_kernel_size, unmatched_keypoint_score, box_scale, + candidate_search_scale, candidate_ranking_mode, offset_peak_radius, + per_keypoint_offset) + + +class ObjectCenterParams( + collections.namedtuple('ObjectCenterParams', [ + 'classification_loss', 'object_center_loss_weight', 'heatmap_bias_init', + 'min_box_overlap_iou', 'max_box_predictions', 'use_only_known_classes' + ])): + """Namedtuple to store object center prediction related parameters.""" + + __slots__ = () + + def __new__(cls, + classification_loss, + object_center_loss_weight, + heatmap_bias_init=-2.19, + min_box_overlap_iou=0.7, + max_box_predictions=100, + use_labeled_classes=False): + """Constructor with default values for ObjectCenterParams. + + Args: + classification_loss: an object_detection.core.losses.Loss object to + compute the loss for the class predictions in CenterNet. + object_center_loss_weight: float, The weight for the object center loss. + heatmap_bias_init: float, the initial value of bias in the convolutional + kernel of the object center prediction head. If set to None, the bias is + initialized with zeros. + min_box_overlap_iou: float, the minimum IOU overlap that predicted boxes + need have with groundtruth boxes to not be penalized. This is used for + computing the class specific center heatmaps. + max_box_predictions: int, the maximum number of boxes to predict. + use_labeled_classes: boolean, compute the loss only labeled classes. + + Returns: + An initialized ObjectCenterParams namedtuple. + """ + return super(ObjectCenterParams, + cls).__new__(cls, classification_loss, + object_center_loss_weight, heatmap_bias_init, + min_box_overlap_iou, max_box_predictions, + use_labeled_classes) + + +class MaskParams( + collections.namedtuple('MaskParams', [ + 'classification_loss', 'task_loss_weight', 'mask_height', 'mask_width', + 'score_threshold', 'heatmap_bias_init' + ])): + """Namedtuple to store mask prediction related parameters.""" + + __slots__ = () + + def __new__(cls, + classification_loss, + task_loss_weight=1.0, + mask_height=256, + mask_width=256, + score_threshold=0.5, + heatmap_bias_init=-2.19): + """Constructor with default values for MaskParams. 
+ + Args: + classification_loss: an object_detection.core.losses.Loss object to + compute the loss for the semantic segmentation predictions in CenterNet. + task_loss_weight: float, The loss weight for the segmentation task. + mask_height: The height of the resized instance segmentation mask. + mask_width: The width of the resized instance segmentation mask. + score_threshold: The threshold at which to convert predicted mask + probabilities (after passing through sigmoid) into foreground pixels. + heatmap_bias_init: float, the initial value of bias in the convolutional + kernel of the semantic segmentation prediction head. If set to None, the + bias is initialized with zeros. + + Returns: + An initialized MaskParams namedtuple. + """ + return super(MaskParams, + cls).__new__(cls, classification_loss, + task_loss_weight, mask_height, mask_width, + score_threshold, heatmap_bias_init) + + +# The following constants are used to generate the keys of the +# (prediction, loss, target assigner,...) dictionaries used in CenterNetMetaArch +# class. +DETECTION_TASK = 'detection_task' +OBJECT_CENTER = 'object_center' +BOX_SCALE = 'box/scale' +BOX_OFFSET = 'box/offset' +KEYPOINT_REGRESSION = 'keypoint/regression' +KEYPOINT_HEATMAP = 'keypoint/heatmap' +KEYPOINT_OFFSET = 'keypoint/offset' +SEGMENTATION_TASK = 'segmentation_task' +SEGMENTATION_HEATMAP = 'segmentation/heatmap' +LOSS_KEY_PREFIX = 'Loss' + + +def get_keypoint_name(task_name, head_name): + return '%s/%s' % (task_name, head_name) + + +def get_num_instances_from_weights(groundtruth_weights_list): + """Computes the number of instances/boxes from the weights in a batch. + + Args: + groundtruth_weights_list: A list of float tensors with shape + [max_num_instances] representing whether there is an actual instance in + the image (with non-zero value) or is padded to match the + max_num_instances (with value 0.0). The list represents the batch + dimension. + + Returns: + A scalar integer tensor incidating how many instances/boxes are in the + images in the batch. Note that this function is usually used to normalize + the loss so the minimum return value is 1 to avoid weird behavior. + """ + num_instances = tf.reduce_sum( + [tf.math.count_nonzero(w) for w in groundtruth_weights_list]) + num_instances = tf.maximum(num_instances, 1) + return num_instances + + +class CenterNetMetaArch(model.DetectionModel): + """The CenterNet meta architecture [1]. + + [1]: https://arxiv.org/abs/1904.07850 + """ + + def __init__(self, + is_training, + add_summaries, + num_classes, + feature_extractor, + image_resizer_fn, + object_center_params, + object_detection_params=None, + keypoint_params_dict=None, + mask_params=None): + """Initializes a CenterNet model. + + Args: + is_training: Set to True if this model is being built for training. + add_summaries: Whether to add tf summaries in the model. + num_classes: int, The number of classes that the model should predict. + feature_extractor: A CenterNetFeatureExtractor to use to extract features + from an image. + image_resizer_fn: a callable for image resizing. This callable always + takes a rank-3 image tensor (corresponding to a single image) and + returns a rank-3 image tensor, possibly with new spatial dimensions and + a 1-D tensor of shape [3] indicating shape of true image within the + resized image tensor as the resized image tensor could be padded. See + builders/image_resizer_builder.py. + object_center_params: An ObjectCenterParams namedtuple. 
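`get_num_instances_from_weights` is easiest to see with a concrete batch; a sketch in plain Python with invented weights:

```python
# Illustrative only: count real (non-padded) boxes across a batch, with a
# floor of 1 so downstream loss normalization never divides by zero.
groundtruth_weights_list = [
    [1.0, 1.0, 0.0, 0.0],  # image with 2 real boxes, padded to 4
    [1.0, 0.0, 0.0, 0.0],  # image with 1 real box
]
num_instances = sum(sum(1 for w in weights if w != 0.0)
                    for weights in groundtruth_weights_list)
num_instances = max(num_instances, 1)
print(num_instances)  # 3
```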
This object holds + the hyper-parameters for object center prediction. This is required by + either object detection or keypoint estimation tasks. + object_detection_params: An ObjectDetectionParams namedtuple. This object + holds the hyper-parameters necessary for object detection. Please see + the class definition for more details. + keypoint_params_dict: A dictionary that maps from task name to the + corresponding KeypointEstimationParams namedtuple. This object holds the + hyper-parameters necessary for multiple keypoint estimations. Please + see the class definition for more details. + mask_params: A MaskParams namedtuple. This object + holds the hyper-parameters for segmentation. Please see the class + definition for more details. + """ + assert object_detection_params or keypoint_params_dict + # Shorten the name for convenience and better formatting. + self._is_training = is_training + # The Objects as Points paper attaches loss functions to multiple + # (`num_feature_outputs`) feature maps in the the backbone. E.g. + # for the hourglass backbone, `num_feature_outputs` is 2. + self._feature_extractor = feature_extractor + self._num_feature_outputs = feature_extractor.num_feature_outputs + self._stride = self._feature_extractor.out_stride + self._image_resizer_fn = image_resizer_fn + self._center_params = object_center_params + self._od_params = object_detection_params + self._kp_params_dict = keypoint_params_dict + self._mask_params = mask_params + + # Construct the prediction head nets. + self._prediction_head_dict = self._construct_prediction_heads( + num_classes, + self._num_feature_outputs, + class_prediction_bias_init=self._center_params.heatmap_bias_init) + # Initialize the target assigners. + self._target_assigner_dict = self._initialize_target_assigners( + stride=self._stride, + min_box_overlap_iou=self._center_params.min_box_overlap_iou) + + # Will be used in VOD single_frame_meta_arch for tensor reshape. + self._batched_prediction_tensor_names = [] + + super(CenterNetMetaArch, self).__init__(num_classes) + + @property + def batched_prediction_tensor_names(self): + if not self._batched_prediction_tensor_names: + raise RuntimeError('Must call predict() method to get batched prediction ' + 'tensor names.') + return self._batched_prediction_tensor_names + + def _construct_prediction_heads(self, num_classes, num_feature_outputs, + class_prediction_bias_init): + """Constructs the prediction heads based on the specific parameters. + + Args: + num_classes: An integer indicating how many classes in total to predict. + num_feature_outputs: An integer indicating how many feature outputs to use + for calculating the loss. The Objects as Points paper attaches loss + functions to multiple (`num_feature_outputs`) feature maps in the the + backbone. E.g. for the hourglass backbone, `num_feature_outputs` is 2. + class_prediction_bias_init: float, the initial value of bias in the + convolutional kernel of the class prediction head. If set to None, the + bias is initialized with zeros. + + Returns: + A dictionary of keras modules generated by calling make_prediction_net + function. 
+ """ + prediction_heads = {} + prediction_heads[OBJECT_CENTER] = [ + make_prediction_net(num_classes, bias_fill=class_prediction_bias_init) + for _ in range(num_feature_outputs) + ] + if self._od_params is not None: + prediction_heads[BOX_SCALE] = [ + make_prediction_net(NUM_SIZE_CHANNELS) + for _ in range(num_feature_outputs) + ] + prediction_heads[BOX_OFFSET] = [ + make_prediction_net(NUM_OFFSET_CHANNELS) + for _ in range(num_feature_outputs) + ] + if self._kp_params_dict is not None: + for task_name, kp_params in self._kp_params_dict.items(): + num_keypoints = len(kp_params.keypoint_indices) + prediction_heads[get_keypoint_name(task_name, KEYPOINT_HEATMAP)] = [ + make_prediction_net( + num_keypoints, bias_fill=kp_params.heatmap_bias_init) + for _ in range(num_feature_outputs) + ] + prediction_heads[get_keypoint_name(task_name, KEYPOINT_REGRESSION)] = [ + make_prediction_net(NUM_OFFSET_CHANNELS * num_keypoints) + for _ in range(num_feature_outputs) + ] + if kp_params.per_keypoint_offset: + prediction_heads[get_keypoint_name(task_name, KEYPOINT_OFFSET)] = [ + make_prediction_net(NUM_OFFSET_CHANNELS * num_keypoints) + for _ in range(num_feature_outputs) + ] + else: + prediction_heads[get_keypoint_name(task_name, KEYPOINT_OFFSET)] = [ + make_prediction_net(NUM_OFFSET_CHANNELS) + for _ in range(num_feature_outputs) + ] + if self._mask_params is not None: + prediction_heads[SEGMENTATION_HEATMAP] = [ + make_prediction_net(num_classes, + bias_fill=class_prediction_bias_init) + for _ in range(num_feature_outputs)] + return prediction_heads + + def _initialize_target_assigners(self, stride, min_box_overlap_iou): + """Initializes the target assigners and puts them in a dictionary. + + Args: + stride: An integer indicating the stride of the image. + min_box_overlap_iou: float, the minimum IOU overlap that predicted boxes + need have with groundtruth boxes to not be penalized. This is used for + computing the class specific center heatmaps. + + Returns: + A dictionary of initialized target assigners for each task. + """ + target_assigners = {} + target_assigners[OBJECT_CENTER] = ( + cn_assigner.CenterNetCenterHeatmapTargetAssigner( + stride, min_box_overlap_iou)) + if self._od_params is not None: + target_assigners[DETECTION_TASK] = ( + cn_assigner.CenterNetBoxTargetAssigner(stride)) + if self._kp_params_dict is not None: + for task_name, kp_params in self._kp_params_dict.items(): + target_assigners[task_name] = ( + cn_assigner.CenterNetKeypointTargetAssigner( + stride=stride, + class_id=kp_params.class_id, + keypoint_indices=kp_params.keypoint_indices, + keypoint_std_dev=kp_params.keypoint_std_dev, + peak_radius=kp_params.offset_peak_radius, + per_keypoint_offset=kp_params.per_keypoint_offset)) + if self._mask_params is not None: + target_assigners[SEGMENTATION_TASK] = ( + cn_assigner.CenterNetMaskTargetAssigner(stride)) + + return target_assigners + + def _compute_object_center_loss(self, input_height, input_width, + object_center_predictions, per_pixel_weights): + """Computes the object center loss. + + Args: + input_height: An integer scalar tensor representing input image height. + input_width: An integer scalar tensor representing input image width. + object_center_predictions: A list of float tensors of shape [batch_size, + out_height, out_width, num_classes] representing the object center + feature maps. 
+ per_pixel_weights: A float tensor of shape [batch_size, + out_height * out_width, 1] with 1s in locations where the spatial + coordinates fall within the height and width in true_image_shapes. + + Returns: + A float scalar tensor representing the object center loss per instance. + """ + gt_boxes_list = self.groundtruth_lists(fields.BoxListFields.boxes) + gt_classes_list = self.groundtruth_lists(fields.BoxListFields.classes) + gt_weights_list = self.groundtruth_lists(fields.BoxListFields.weights) + + if self._center_params.use_only_known_classes: + gt_labeled_classes_list = self.groundtruth_lists( + fields.InputDataFields.groundtruth_labeled_classes) + batch_labeled_classes = tf.stack(gt_labeled_classes_list, axis=0) + batch_labeled_classes_shape = tf.shape(batch_labeled_classes) + batch_labeled_classes = tf.reshape( + batch_labeled_classes, + [batch_labeled_classes_shape[0], 1, batch_labeled_classes_shape[-1]]) + per_pixel_weights = per_pixel_weights * batch_labeled_classes + + # Convert the groundtruth to targets. + assigner = self._target_assigner_dict[OBJECT_CENTER] + heatmap_targets = assigner.assign_center_targets_from_boxes( + height=input_height, + width=input_width, + gt_boxes_list=gt_boxes_list, + gt_classes_list=gt_classes_list, + gt_weights_list=gt_weights_list) + + flattened_heatmap_targets = _flatten_spatial_dimensions(heatmap_targets) + num_boxes = _to_float32(get_num_instances_from_weights(gt_weights_list)) + + loss = 0.0 + object_center_loss = self._center_params.classification_loss + # Loop through each feature output head. + for pred in object_center_predictions: + pred = _flatten_spatial_dimensions(pred) + loss += object_center_loss( + pred, flattened_heatmap_targets, weights=per_pixel_weights) + loss_per_instance = tf.reduce_sum(loss) / ( + float(len(object_center_predictions)) * num_boxes) + return loss_per_instance + + def _compute_object_detection_losses(self, input_height, input_width, + prediction_dict, per_pixel_weights): + """Computes the weighted object detection losses. + + This wrapper function calls the function which computes the losses for + object detection task and applies corresponding weights to the losses. + + Args: + input_height: An integer scalar tensor representing input image height. + input_width: An integer scalar tensor representing input image width. + prediction_dict: A dictionary holding predicted tensors output by + "predict" function. See "predict" function for more detailed + description. + per_pixel_weights: A float tensor of shape [batch_size, + out_height * out_width, 1] with 1s in locations where the spatial + coordinates fall within the height and width in true_image_shapes. + + Returns: + A dictionary of scalar float tensors representing the weighted losses for + object detection task: + BOX_SCALE: the weighted scale (height/width) loss. + BOX_OFFSET: the weighted object offset loss. + """ + od_scale_loss, od_offset_loss = self._compute_box_scale_and_offset_loss( + scale_predictions=prediction_dict[BOX_SCALE], + offset_predictions=prediction_dict[BOX_OFFSET], + input_height=input_height, + input_width=input_width) + loss_dict = {} + loss_dict[BOX_SCALE] = ( + self._od_params.scale_loss_weight * od_scale_loss) + loss_dict[BOX_OFFSET] = ( + self._od_params.offset_loss_weight * od_offset_loss) + return loss_dict + + def _compute_box_scale_and_offset_loss(self, input_height, input_width, + scale_predictions, offset_predictions): + """Computes the scale loss of the object detection task. 
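The normalization used by `_compute_object_center_loss` above follows a simple pattern: sum the loss over all feature output heads, then divide by the number of heads times the instance count. A similar averaging over heads, with an instance or keypoint count as the denominator, appears in the detection and keypoint losses below. A toy sketch with invented numbers:

```python
# Illustrative only: average a summed loss over feature output heads and
# normalize by the number of groundtruth instances.
per_head_losses = [12.0, 10.0]  # summed loss from each feature output head
num_boxes = 4.0                 # from get_num_instances_from_weights()

loss_per_instance = sum(per_head_losses) / (len(per_head_losses) * num_boxes)
print(loss_per_instance)  # 2.75
```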
+ + Args: + input_height: An integer scalar tensor representing input image height. + input_width: An integer scalar tensor representing input image width. + scale_predictions: A list of float tensors of shape [batch_size, + out_height, out_width, 2] representing the prediction heads of the model + for object scale (i.e height and width). + offset_predictions: A list of float tensors of shape [batch_size, + out_height, out_width, 2] representing the prediction heads of the model + for object offset. + + Returns: + A tuple of two losses: + scale_loss: A float scalar tensor representing the object height/width + loss normalized by total number of boxes. + offset_loss: A float scalar tensor representing the object offset loss + normalized by total number of boxes + """ + # TODO(vighneshb) Explore a size invariant version of scale loss. + gt_boxes_list = self.groundtruth_lists(fields.BoxListFields.boxes) + gt_weights_list = self.groundtruth_lists(fields.BoxListFields.weights) + num_boxes = _to_float32(get_num_instances_from_weights(gt_weights_list)) + num_predictions = float(len(scale_predictions)) + + assigner = self._target_assigner_dict[DETECTION_TASK] + (batch_indices, batch_height_width_targets, batch_offset_targets, + batch_weights) = assigner.assign_size_and_offset_targets( + height=input_height, + width=input_width, + gt_boxes_list=gt_boxes_list, + gt_weights_list=gt_weights_list) + batch_weights = tf.expand_dims(batch_weights, -1) + + scale_loss = 0 + offset_loss = 0 + localization_loss_fn = self._od_params.localization_loss + for scale_pred, offset_pred in zip(scale_predictions, offset_predictions): + # Compute the scale loss. + scale_pred = cn_assigner.get_batch_predictions_from_indices( + scale_pred, batch_indices) + scale_loss += localization_loss_fn( + scale_pred, batch_height_width_targets, weights=batch_weights) + # Compute the offset loss. 
+ offset_pred = cn_assigner.get_batch_predictions_from_indices( + offset_pred, batch_indices) + offset_loss += localization_loss_fn( + offset_pred, batch_offset_targets, weights=batch_weights) + scale_loss = tf.reduce_sum(scale_loss) / ( + num_predictions * num_boxes) + offset_loss = tf.reduce_sum(offset_loss) / ( + num_predictions * num_boxes) + return scale_loss, offset_loss + + def _compute_keypoint_estimation_losses(self, task_name, input_height, + input_width, prediction_dict, + per_pixel_weights): + """Computes the weighted keypoint losses.""" + kp_params = self._kp_params_dict[task_name] + heatmap_key = get_keypoint_name(task_name, KEYPOINT_HEATMAP) + offset_key = get_keypoint_name(task_name, KEYPOINT_OFFSET) + regression_key = get_keypoint_name(task_name, KEYPOINT_REGRESSION) + heatmap_loss = self._compute_kp_heatmap_loss( + input_height=input_height, + input_width=input_width, + task_name=task_name, + heatmap_predictions=prediction_dict[heatmap_key], + classification_loss_fn=kp_params.classification_loss, + per_pixel_weights=per_pixel_weights) + offset_loss = self._compute_kp_offset_loss( + input_height=input_height, + input_width=input_width, + task_name=task_name, + offset_predictions=prediction_dict[offset_key], + localization_loss_fn=kp_params.localization_loss) + reg_loss = self._compute_kp_regression_loss( + input_height=input_height, + input_width=input_width, + task_name=task_name, + regression_predictions=prediction_dict[regression_key], + localization_loss_fn=kp_params.localization_loss) + + loss_dict = {} + loss_dict[heatmap_key] = ( + kp_params.keypoint_heatmap_loss_weight * heatmap_loss) + loss_dict[offset_key] = ( + kp_params.keypoint_offset_loss_weight * offset_loss) + loss_dict[regression_key] = ( + kp_params.keypoint_regression_loss_weight * reg_loss) + return loss_dict + + def _compute_kp_heatmap_loss(self, input_height, input_width, task_name, + heatmap_predictions, classification_loss_fn, + per_pixel_weights): + """Computes the heatmap loss of the keypoint estimation task. + + Args: + input_height: An integer scalar tensor representing input image height. + input_width: An integer scalar tensor representing input image width. + task_name: A string representing the name of the keypoint task. + heatmap_predictions: A list of float tensors of shape [batch_size, + out_height, out_width, num_keypoints] representing the prediction heads + of the model for keypoint heatmap. + classification_loss_fn: An object_detection.core.losses.Loss object to + compute the loss for the class predictions in CenterNet. + per_pixel_weights: A float tensor of shape [batch_size, + out_height * out_width, 1] with 1s in locations where the spatial + coordinates fall within the height and width in true_image_shapes. + + Returns: + loss: A float scalar tensor representing the object keypoint heatmap loss + normalized by number of instances. 
+ """ + gt_keypoints_list = self.groundtruth_lists(fields.BoxListFields.keypoints) + gt_classes_list = self.groundtruth_lists(fields.BoxListFields.classes) + gt_weights_list = self.groundtruth_lists(fields.BoxListFields.weights) + gt_boxes_list = self.groundtruth_lists(fields.BoxListFields.boxes) + + assigner = self._target_assigner_dict[task_name] + (keypoint_heatmap, num_instances_per_kp_type, + valid_mask_batch) = assigner.assign_keypoint_heatmap_targets( + height=input_height, + width=input_width, + gt_keypoints_list=gt_keypoints_list, + gt_weights_list=gt_weights_list, + gt_classes_list=gt_classes_list, + gt_boxes_list=gt_boxes_list) + flattened_valid_mask = _flatten_spatial_dimensions( + tf.expand_dims(valid_mask_batch, axis=-1)) + flattened_heapmap_targets = _flatten_spatial_dimensions(keypoint_heatmap) + # Sum over the number of instances per keypoint types to get the total + # number of keypoints. Note that this is used to normalized the loss and we + # keep the minimum value to be 1 to avoid generating weird loss value when + # no keypoint is in the image batch. + num_instances = tf.maximum( + tf.cast(tf.reduce_sum(num_instances_per_kp_type), dtype=tf.float32), + 1.0) + loss = 0.0 + # Loop through each feature output head. + for pred in heatmap_predictions: + pred = _flatten_spatial_dimensions(pred) + unweighted_loss = classification_loss_fn( + pred, + flattened_heapmap_targets, + weights=tf.ones_like(per_pixel_weights)) + # Apply the weights after the loss function to have full control over it. + loss += unweighted_loss * per_pixel_weights * flattened_valid_mask + loss = tf.reduce_sum(loss) / ( + float(len(heatmap_predictions)) * num_instances) + return loss + + def _compute_kp_offset_loss(self, input_height, input_width, task_name, + offset_predictions, localization_loss_fn): + """Computes the offset loss of the keypoint estimation task. + + Args: + input_height: An integer scalar tensor representing input image height. + input_width: An integer scalar tensor representing input image width. + task_name: A string representing the name of the keypoint task. + offset_predictions: A list of float tensors of shape [batch_size, + out_height, out_width, 2] representing the prediction heads of the model + for keypoint offset. + localization_loss_fn: An object_detection.core.losses.Loss object to + compute the loss for the keypoint offset predictions in CenterNet. + + Returns: + loss: A float scalar tensor representing the keypoint offset loss + normalized by number of total keypoints. + """ + gt_keypoints_list = self.groundtruth_lists(fields.BoxListFields.keypoints) + gt_classes_list = self.groundtruth_lists(fields.BoxListFields.classes) + gt_weights_list = self.groundtruth_lists(fields.BoxListFields.weights) + + assigner = self._target_assigner_dict[task_name] + (batch_indices, batch_offsets, + batch_weights) = assigner.assign_keypoints_offset_targets( + height=input_height, + width=input_width, + gt_keypoints_list=gt_keypoints_list, + gt_weights_list=gt_weights_list, + gt_classes_list=gt_classes_list) + + # Keypoint offset loss. + loss = 0.0 + for prediction in offset_predictions: + batch_size, out_height, out_width, channels = _get_shape(prediction, 4) + if channels > 2: + prediction = tf.reshape( + prediction, shape=[batch_size, out_height, out_width, -1, 2]) + prediction = cn_assigner.get_batch_predictions_from_indices( + prediction, batch_indices) + # The dimensions passed are not as per the doc string but the loss + # still computes the correct value. 
+ unweighted_loss = localization_loss_fn( + prediction, + batch_offsets, + weights=tf.expand_dims(tf.ones_like(batch_weights), -1)) + # Apply the weights after the loss function to have full control over it. + loss += batch_weights * tf.reduce_sum(unweighted_loss, axis=1) + + loss = tf.reduce_sum(loss) / ( + float(len(offset_predictions)) * + tf.maximum(tf.reduce_sum(batch_weights), 1.0)) + return loss + + def _compute_kp_regression_loss(self, input_height, input_width, task_name, + regression_predictions, localization_loss_fn): + """Computes the keypoint regression loss of the keypoint estimation task. + + Args: + input_height: An integer scalar tensor representing input image height. + input_width: An integer scalar tensor representing input image width. + task_name: A string representing the name of the keypoint task. + regression_predictions: A list of float tensors of shape [batch_size, + out_height, out_width, 2 * num_keypoints] representing the prediction + heads of the model for keypoint regression offset. + localization_loss_fn: An object_detection.core.losses.Loss object to + compute the loss for the keypoint regression offset predictions in + CenterNet. + + Returns: + loss: A float scalar tensor representing the keypoint regression offset + loss normalized by number of total keypoints. + """ + gt_boxes_list = self.groundtruth_lists(fields.BoxListFields.boxes) + gt_keypoints_list = self.groundtruth_lists(fields.BoxListFields.keypoints) + gt_classes_list = self.groundtruth_lists(fields.BoxListFields.classes) + gt_weights_list = self.groundtruth_lists(fields.BoxListFields.weights) + # keypoint regression offset loss. + assigner = self._target_assigner_dict[task_name] + (batch_indices, batch_regression_offsets, + batch_weights) = assigner.assign_joint_regression_targets( + height=input_height, + width=input_width, + gt_keypoints_list=gt_keypoints_list, + gt_classes_list=gt_classes_list, + gt_weights_list=gt_weights_list, + gt_boxes_list=gt_boxes_list) + + loss = 0.0 + for prediction in regression_predictions: + batch_size, out_height, out_width, _ = _get_shape(prediction, 4) + reshaped_prediction = tf.reshape( + prediction, shape=[batch_size, out_height, out_width, -1, 2]) + reg_prediction = cn_assigner.get_batch_predictions_from_indices( + reshaped_prediction, batch_indices) + unweighted_loss = localization_loss_fn( + reg_prediction, + batch_regression_offsets, + weights=tf.expand_dims(tf.ones_like(batch_weights), -1)) + # Apply the weights after the loss function to have full control over it. + loss += batch_weights * tf.reduce_sum(unweighted_loss, axis=1) + + loss = tf.reduce_sum(loss) / ( + float(len(regression_predictions)) * + tf.maximum(tf.reduce_sum(batch_weights), 1.0)) + return loss + + def _compute_segmentation_losses(self, prediction_dict, per_pixel_weights): + """Computes all the losses associated with segmentation. + + Args: + prediction_dict: The dictionary returned from the predict() method. + per_pixel_weights: A float tensor of shape [batch_size, + out_height * out_width, 1] with 1s in locations where the spatial + coordinates fall within the height and width in true_image_shapes. + + Returns: + A dictionary with segmentation losses. + """ + segmentation_heatmap = prediction_dict[SEGMENTATION_HEATMAP] + mask_loss = self._compute_mask_loss( + segmentation_heatmap, per_pixel_weights) + losses = { + SEGMENTATION_HEATMAP: mask_loss + } + return losses + + def _compute_mask_loss(self, segmentation_predictions, + per_pixel_weights): + """Computes the mask loss. 
+ + Args: + segmentation_predictions: A list of float32 tensors of shape [batch_size, + out_height, out_width, num_classes]. + per_pixel_weights: A float tensor of shape [batch_size, + out_height * out_width, 1] with 1s in locations where the spatial + coordinates fall within the height and width in true_image_shapes. + + Returns: + A float scalar tensor representing the mask loss. + """ + gt_masks_list = self.groundtruth_lists(fields.BoxListFields.masks) + gt_classes_list = self.groundtruth_lists(fields.BoxListFields.classes) + + # Convert the groundtruth to targets. + assigner = self._target_assigner_dict[SEGMENTATION_TASK] + heatmap_targets = assigner.assign_segmentation_targets( + gt_masks_list=gt_masks_list, + gt_classes_list=gt_classes_list) + + flattened_heatmap_targets = _flatten_spatial_dimensions(heatmap_targets) + + loss = 0.0 + mask_loss_fn = self._mask_params.classification_loss + total_pixels_in_loss = tf.reduce_sum(per_pixel_weights) + + # Loop through each feature output head. + for pred in segmentation_predictions: + pred = _flatten_spatial_dimensions(pred) + loss += mask_loss_fn( + pred, flattened_heatmap_targets, weights=per_pixel_weights) + # TODO(ronnyvotel): Consider other ways to normalize loss. + total_loss = tf.reduce_sum(loss) / ( + float(len(segmentation_predictions)) * total_pixels_in_loss) + return total_loss + + def preprocess(self, inputs): + outputs = shape_utils.resize_images_and_return_shapes( + inputs, self._image_resizer_fn) + resized_inputs, true_image_shapes = outputs + + return (self._feature_extractor.preprocess(resized_inputs), + true_image_shapes) + + def predict(self, preprocessed_inputs, _): + """Predicts CenterNet prediction tensors given an input batch. + + Feature extractors are free to produce predictions from multiple feature + maps and therefore we return a dictionary mapping strings to lists. + E.g. the hourglass backbone produces two feature maps. + + Args: + preprocessed_inputs: a [batch, height, width, channels] float32 tensor + representing a batch of images. + + Returns: + prediction_dict: a dictionary holding predicted tensors with + 'preprocessed_inputs' - The input image after being resized and + preprocessed by the feature extractor. + 'object_center' - A list of size num_feature_outputs containing + float tensors of size [batch_size, output_height, output_width, + num_classes] representing the predicted object center heatmap logits. + 'box/scale' - [optional] A list of size num_feature_outputs holding + float tensors of size [batch_size, output_height, output_width, 2] + representing the predicted box height and width at each output + location. This field exists only when object detection task is + specified. + 'box/offset' - [optional] A list of size num_feature_outputs holding + float tensors of size [batch_size, output_height, output_width, 2] + representing the predicted y and x offsets at each output location. + '$TASK_NAME/keypoint_heatmap' - [optional] A list of size + num_feature_outputs holding float tensors of size [batch_size, + output_height, output_width, num_keypoints] representing the predicted + keypoint heatmap logits. + '$TASK_NAME/keypoint_offset' - [optional] A list of size + num_feature_outputs holding float tensors of size [batch_size, + output_height, output_width, 2] representing the predicted keypoint + offsets at each output location. 
+ '$TASK_NAME/keypoint_regression' - [optional] A list of size + num_feature_outputs holding float tensors of size [batch_size, + output_height, output_width, 2 * num_keypoints] representing the + predicted keypoint regression at each output location. + 'segmentation/heatmap' - [optional] A list of size num_feature_outputs + holding float tensors of size [batch_size, output_height, + output_width, num_classes] representing the mask logits. + Note the $TASK_NAME is provided by the KeypointEstimation namedtuple + used to differentiate between different keypoint tasks. + """ + features_list = self._feature_extractor(preprocessed_inputs) + + predictions = {} + for head_name, heads in self._prediction_head_dict.items(): + predictions[head_name] = [ + head(feature) for (feature, head) in zip(features_list, heads) + ] + predictions['preprocessed_inputs'] = preprocessed_inputs + + self._batched_prediction_tensor_names = predictions.keys() + return predictions + + def loss(self, prediction_dict, true_image_shapes, scope=None): + """Computes scalar loss tensors with respect to provided groundtruth. + + This function implements the various CenterNet losses. + + Args: + prediction_dict: a dictionary holding predicted tensors returned by + "predict" function. + true_image_shapes: int32 tensor of shape [batch, 3] where each row is of + the form [height, width, channels] indicating the shapes of true images + in the resized images, as resized images can be padded with zeros. + scope: Optional scope name. + + Returns: + A dictionary mapping the keys ['Loss/object_center', 'Loss/box/scale', + 'Loss/box/offset', 'Loss/$TASK_NAME/keypoint/heatmap', + 'Loss/$TASK_NAME/keypoint/offset', + 'Loss/$TASK_NAME/keypoint/regression', 'Loss/segmentation/heatmap'] to + scalar tensors corresponding to the losses for different tasks. Note the + $TASK_NAME is provided by the KeypointEstimation namedtuple used to + differentiate between different keypoint tasks. + """ + + _, input_height, input_width, _ = _get_shape( + prediction_dict['preprocessed_inputs'], 4) + + output_height, output_width = (input_height // self._stride, + input_width // self._stride) + + # TODO(vighneshb) Explore whether using floor here is safe. 
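+    # Using ceil keeps output cells that only partially overlap the true image
+    # marked as valid; e.g. with stride 4, a true height of 102 gives
+    # ceil(102 / 4) = 26 valid rows, whereas floor would keep only 25.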
+ output_true_image_shapes = tf.ceil( + tf.to_float(true_image_shapes) / self._stride) + valid_anchor_weights = get_valid_anchor_weights_in_flattened_image( + output_true_image_shapes, output_height, output_width) + valid_anchor_weights = tf.expand_dims(valid_anchor_weights, 2) + + object_center_loss = self._compute_object_center_loss( + object_center_predictions=prediction_dict[OBJECT_CENTER], + input_height=input_height, + input_width=input_width, + per_pixel_weights=valid_anchor_weights) + losses = { + OBJECT_CENTER: + self._center_params.object_center_loss_weight * object_center_loss + } + if self._od_params is not None: + od_losses = self._compute_object_detection_losses( + input_height=input_height, + input_width=input_width, + prediction_dict=prediction_dict, + per_pixel_weights=valid_anchor_weights) + for key in od_losses: + od_losses[key] = od_losses[key] * self._od_params.task_loss_weight + losses.update(od_losses) + + if self._kp_params_dict is not None: + for task_name, params in self._kp_params_dict.items(): + kp_losses = self._compute_keypoint_estimation_losses( + task_name=task_name, + input_height=input_height, + input_width=input_width, + prediction_dict=prediction_dict, + per_pixel_weights=valid_anchor_weights) + for key in kp_losses: + kp_losses[key] = kp_losses[key] * params.task_loss_weight + losses.update(kp_losses) + + if self._mask_params is not None: + seg_losses = self._compute_segmentation_losses( + prediction_dict=prediction_dict, + per_pixel_weights=valid_anchor_weights) + for key in seg_losses: + seg_losses[key] = seg_losses[key] * self._mask_params.task_loss_weight + losses.update(seg_losses) + + # Prepend the LOSS_KEY_PREFIX to the keys in the dictionary such that the + # losses will be grouped together in Tensorboard. + return dict([('%s/%s' % (LOSS_KEY_PREFIX, key), val) + for key, val in losses.items()]) + + def postprocess(self, prediction_dict, true_image_shapes, **params): + """Produces boxes given a prediction dict returned by predict(). + + Although predict returns a list of tensors, only the last tensor in + each list is used for making box predictions. + + Args: + prediction_dict: a dictionary holding predicted tensors from "predict" + function. + true_image_shapes: int32 tensor of shape [batch, 3] where each row is of + the form [height, width, channels] indicating the shapes of true images + in the resized images, as resized images can be padded with zeros. + **params: Currently ignored. + + Returns: + detections: a dictionary containing the following fields + detection_boxes - A tensor of shape [batch, max_detections, 4] + holding the predicted boxes. + detection_scores: A tensor of shape [batch, max_detections] holding + the predicted score for each box. + detection_classes: An integer tensor of shape [batch, max_detections] + containing the detected class for each box. + num_detections: An integer tensor of shape [batch] containing the + number of detected boxes for each sample in the batch. + detection_keypoints: (Optional) A float tensor of shape [batch, + max_detections, num_keypoints, 2] with normalized keypoints. Any + invalid keypoints have their coordinates and scores set to 0.0. + detection_keypoint_scores: (Optional) A float tensor of shape [batch, + max_detection, num_keypoints] with scores for each keypoint. + detection_masks: (Optional) An int tensor of shape [batch, + max_detections, mask_height, mask_width] with binarized masks for each + detection. 
+ """ + object_center_prob = tf.nn.sigmoid(prediction_dict[OBJECT_CENTER][-1]) + # Get x, y and channel indices corresponding to the top indices in the class + # center predictions. + detection_scores, y_indices, x_indices, channel_indices = ( + top_k_feature_map_locations( + object_center_prob, max_pool_kernel_size=3, + k=self._center_params.max_box_predictions)) + + boxes_strided, classes, scores, num_detections = ( + prediction_tensors_to_boxes( + detection_scores, y_indices, x_indices, channel_indices, + prediction_dict[BOX_SCALE][-1], prediction_dict[BOX_OFFSET][-1])) + + boxes = convert_strided_predictions_to_normalized_boxes( + boxes_strided, self._stride, true_image_shapes) + + postprocess_dict = { + fields.DetectionResultFields.detection_boxes: boxes, + fields.DetectionResultFields.detection_scores: scores, + fields.DetectionResultFields.detection_classes: classes, + fields.DetectionResultFields.num_detections: num_detections, + } + + if self._kp_params_dict: + keypoints, keypoint_scores = self._postprocess_keypoints( + prediction_dict, classes, y_indices, x_indices, + boxes_strided, num_detections) + keypoints, keypoint_scores = ( + convert_strided_predictions_to_normalized_keypoints( + keypoints, keypoint_scores, self._stride, true_image_shapes, + clip_out_of_frame_keypoints=True)) + postprocess_dict.update({ + fields.DetectionResultFields.detection_keypoints: keypoints, + fields.DetectionResultFields.detection_keypoint_scores: + keypoint_scores + }) + + if self._mask_params: + masks = tf.nn.sigmoid(prediction_dict[SEGMENTATION_HEATMAP][-1]) + instance_masks = convert_strided_predictions_to_instance_masks( + boxes, classes, masks, self._stride, self._mask_params.mask_height, + self._mask_params.mask_width, true_image_shapes, + self._mask_params.score_threshold) + postprocess_dict.update({ + fields.DetectionResultFields.detection_masks: + instance_masks + }) + return postprocess_dict + + def _postprocess_keypoints(self, prediction_dict, classes, y_indices, + x_indices, boxes, num_detections): + """Performs postprocessing on keypoint predictions. + + Args: + prediction_dict: a dictionary holding predicted tensors, returned from the + predict() method. This dictionary should contain keypoint prediction + feature maps for each keypoint task. + classes: A [batch_size, max_detections] int tensor with class indices for + all detected objects. + y_indices: A [batch_size, max_detections] int tensor with y indices for + all object centers. + x_indices: A [batch_size, max_detections] int tensor with x indices for + all object centers. + boxes: A [batch_size, max_detections, 4] float32 tensor with bounding + boxes in (un-normalized) output space. + num_detections: A [batch_size] int tensor with the number of valid + detections for each image. + + Returns: + A tuple of + keypoints: a [batch_size, max_detection, num_total_keypoints, 2] float32 + tensor with keypoints in the output (strided) coordinate frame. + keypoint_scores: a [batch_size, max_detections, num_total_keypoints] + float32 tensor with keypoint scores. 
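+
+    Note that keypoints are computed separately for each keypoint class and
+    then padded to the full keypoint dimension and scattered back into the
+    original detection order, so detections whose class has no keypoint task
+    receive all-zero keypoints and scores.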
+ """ + total_num_keypoints = sum(len(kp_dict.keypoint_indices) for kp_dict + in self._kp_params_dict.values()) + batch_size, max_detections, _ = _get_shape(boxes, 3) + kpt_coords_for_example_list = [] + kpt_scores_for_example_list = [] + for ex_ind in range(batch_size): + kpt_coords_for_class_list = [] + kpt_scores_for_class_list = [] + instance_inds_for_class_list = [] + for task_name, kp_params in self._kp_params_dict.items(): + keypoint_heatmap = prediction_dict[ + get_keypoint_name(task_name, KEYPOINT_HEATMAP)][-1] + keypoint_offsets = prediction_dict[ + get_keypoint_name(task_name, KEYPOINT_OFFSET)][-1] + keypoint_regression = prediction_dict[ + get_keypoint_name(task_name, KEYPOINT_REGRESSION)][-1] + instance_inds = self._get_instance_indices( + classes, num_detections, ex_ind, kp_params.class_id) + + def true_fn( + keypoint_heatmap, keypoint_offsets, keypoint_regression, + classes, y_indices, x_indices, boxes, instance_inds, + ex_ind, kp_params): + """Logics to execute when instance_inds is not an empty set.""" + # Postprocess keypoints and scores for class and single image. Shapes + # are [1, num_instances_i, num_keypoints_i, 2] and + # [1, num_instances_i, num_keypoints_i], respectively. Note that + # num_instances_i and num_keypoints_i refers to the number of + # instances and keypoints for class i, respectively. + kpt_coords_for_class, kpt_scores_for_class = ( + self._postprocess_keypoints_for_class_and_image( + keypoint_heatmap, keypoint_offsets, keypoint_regression, + classes, y_indices, x_indices, boxes, instance_inds, + ex_ind, kp_params)) + # Expand keypoint dimension (with padding) so that coordinates and + # scores have shape [1, num_instances_i, num_total_keypoints, 2] and + # [1, num_instances_i, num_total_keypoints], respectively. + kpts_coords_for_class_padded, kpt_scores_for_class_padded = ( + _pad_to_full_keypoint_dim( + kpt_coords_for_class, kpt_scores_for_class, + kp_params.keypoint_indices, total_num_keypoints)) + return kpts_coords_for_class_padded, kpt_scores_for_class_padded + + def false_fn(): + """Logics to execute when the instance_inds is an empty set.""" + return (tf.zeros([1, 0, total_num_keypoints, 2], dtype=tf.float32), + tf.zeros([1, 0, total_num_keypoints], dtype=tf.float32)) + + true_fn = functools.partial( + true_fn, keypoint_heatmap, keypoint_offsets, keypoint_regression, + classes, y_indices, x_indices, boxes, instance_inds, ex_ind, + kp_params) + results = tf.cond(tf.size(instance_inds) > 0, true_fn, false_fn) + + kpt_coords_for_class_list.append(results[0]) + kpt_scores_for_class_list.append(results[1]) + instance_inds_for_class_list.append(instance_inds) + + # Concatenate all keypoints across all classes (single example). + kpt_coords_for_example = tf.concat(kpt_coords_for_class_list, axis=1) + kpt_scores_for_example = tf.concat(kpt_scores_for_class_list, axis=1) + instance_inds_for_example = tf.concat(instance_inds_for_class_list, + axis=0) + + if tf.size(instance_inds_for_example) > 0: + # Scatter into tensor where instances align with original detection + # instances. New shape of keypoint coordinates and scores are + # [1, max_detections, num_total_keypoints, 2] and + # [1, max_detections, num_total_keypoints], respectively. 
+ kpt_coords_for_example_all_det, kpt_scores_for_example_all_det = ( + _pad_to_full_instance_dim( + kpt_coords_for_example, kpt_scores_for_example, + instance_inds_for_example, + self._center_params.max_box_predictions)) + else: + kpt_coords_for_example_all_det = tf.zeros( + [1, max_detections, total_num_keypoints, 2], dtype=tf.float32) + kpt_scores_for_example_all_det = tf.zeros( + [1, max_detections, total_num_keypoints], dtype=tf.float32) + + kpt_coords_for_example_list.append(kpt_coords_for_example_all_det) + kpt_scores_for_example_list.append(kpt_scores_for_example_all_det) + + # Concatenate all keypoints and scores from all examples in the batch. + # Shapes are [batch_size, max_detections, num_total_keypoints, 2] and + # [batch_size, max_detections, num_total_keypoints], respectively. + keypoints = tf.concat(kpt_coords_for_example_list, axis=0) + keypoint_scores = tf.concat(kpt_scores_for_example_list, axis=0) + + return keypoints, keypoint_scores + + def _get_instance_indices(self, classes, num_detections, batch_index, + class_id): + """Gets the instance indices that match the target class ID. + + Args: + classes: A [batch_size, max_detections] int tensor with class indices for + all detected objects. + num_detections: A [batch_size] int tensor with the number of valid + detections for each image. + batch_index: An integer specifying the index for an example in the batch. + class_id: Class id + + Returns: + instance_inds: A [num_instances] int tensor where each element indicates + the instance location within the `classes` tensor. This is useful to + associate the refined keypoints with the original detections (i.e. + boxes) + """ + classes = classes[batch_index:batch_index+1, ...] + _, max_detections = shape_utils.combined_static_and_dynamic_shape( + classes) + # Get the detection indices corresponding to the target class. + valid_detections_with_kpt_class = tf.math.logical_and( + tf.range(max_detections) < num_detections[batch_index], + classes[0] == class_id) + instance_inds = tf.where(valid_detections_with_kpt_class)[:, 0] + return instance_inds + + def _postprocess_keypoints_for_class_and_image( + self, keypoint_heatmap, keypoint_offsets, keypoint_regression, classes, + y_indices, x_indices, boxes, indices_with_kpt_class, batch_index, + kp_params): + """Postprocess keypoints for a single image and class. + + This function performs the following postprocessing operations on a single + image and single keypoint class: + - Converts keypoints scores to range [0, 1] with sigmoid. + - Determines the detections that correspond to the specified keypoint class. + - Gathers the regressed keypoints at the detection (i.e. box) centers. + - Gathers keypoint candidates from the keypoint heatmaps. + - Snaps regressed keypoints to nearby keypoint candidates. + + Args: + keypoint_heatmap: A [batch_size, height, width, num_keypoints] float32 + tensor with keypoint heatmaps. + keypoint_offsets: A [batch_size, height, width, 2] float32 tensor with + local offsets to keypoint centers. + keypoint_regression: A [batch_size, height, width, 2 * num_keypoints] + float32 tensor with regressed offsets to all keypoints. + classes: A [batch_size, max_detections] int tensor with class indices for + all detected objects. + y_indices: A [batch_size, max_detections] int tensor with y indices for + all object centers. + x_indices: A [batch_size, max_detections] int tensor with x indices for + all object centers. 
+ boxes: A [batch_size, max_detections, 4] float32 tensor with detected + boxes in the output (strided) frame. + indices_with_kpt_class: A [num_instances] int tensor where each element + indicates the instance location within the `classes` tensor. This is + useful to associate the refined keypoints with the original detections + (i.e. boxes) + batch_index: An integer specifying the index for an example in the batch. + kp_params: A `KeypointEstimationParams` object with parameters for a + single keypoint class. + + Returns: + A tuple of + refined_keypoints: A [1, num_instances, num_keypoints, 2] float32 tensor + with refined keypoints for a single class in a single image, expressed + in the output (strided) coordinate frame. Note that `num_instances` is a + dynamic dimension, and corresponds to the number of valid detections + for the specific class. + refined_scores: A [1, num_instances, num_keypoints] float32 tensor with + keypoint scores. + """ + keypoint_indices = kp_params.keypoint_indices + num_keypoints = len(keypoint_indices) + + keypoint_heatmap = tf.nn.sigmoid( + keypoint_heatmap[batch_index:batch_index+1, ...]) + keypoint_offsets = keypoint_offsets[batch_index:batch_index+1, ...] + keypoint_regression = keypoint_regression[batch_index:batch_index+1, ...] + y_indices = y_indices[batch_index:batch_index+1, ...] + x_indices = x_indices[batch_index:batch_index+1, ...] + + # Gather the feature map locations corresponding to the object class. + y_indices_for_kpt_class = tf.gather(y_indices, indices_with_kpt_class, + axis=1) + x_indices_for_kpt_class = tf.gather(x_indices, indices_with_kpt_class, + axis=1) + boxes_for_kpt_class = tf.gather(boxes, indices_with_kpt_class, axis=1) + + # Gather the regressed keypoints. Final tensor has shape + # [1, num_instances, num_keypoints, 2]. + regressed_keypoints_for_objects = regressed_keypoints_at_object_centers( + keypoint_regression, y_indices_for_kpt_class, x_indices_for_kpt_class) + regressed_keypoints_for_objects = tf.reshape( + regressed_keypoints_for_objects, [1, -1, num_keypoints, 2]) + + # Get the candidate keypoints and scores. + # The shape of keypoint_candidates and keypoint_scores is: + # [1, num_candidates_per_keypoint, num_keypoints, 2] and + # [1, num_candidates_per_keypoint, num_keypoints], respectively. + keypoint_candidates, keypoint_scores, num_keypoint_candidates = ( + prediction_tensors_to_keypoint_candidates( + keypoint_heatmap, keypoint_offsets, + keypoint_score_threshold=( + kp_params.keypoint_candidate_score_threshold), + max_pool_kernel_size=kp_params.peak_max_pool_kernel_size, + max_candidates=kp_params.num_candidates_per_keypoint)) + + # Get the refined keypoints and scores, of shape + # [1, num_instances, num_keypoints, 2] and + # [1, num_instances, num_keypoints], respectively. 
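+    # refine_keypoints snaps each regressed keypoint to a nearby heatmap
+    # candidate (selected according to candidate_ranking_mode) within a search
+    # region derived from the detection box; keypoints without an acceptable
+    # candidate keep their regressed coordinates and receive
+    # unmatched_keypoint_score.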
+ refined_keypoints, refined_scores = refine_keypoints( + regressed_keypoints_for_objects, keypoint_candidates, keypoint_scores, + num_keypoint_candidates, bboxes=boxes_for_kpt_class, + unmatched_keypoint_score=kp_params.unmatched_keypoint_score, + box_scale=kp_params.box_scale, + candidate_search_scale=kp_params.candidate_search_scale, + candidate_ranking_mode=kp_params.candidate_ranking_mode) + + return refined_keypoints, refined_scores + + def regularization_losses(self): + return [] + + def restore_map(self, fine_tune_checkpoint_type='classification', + load_all_detection_checkpoint_vars=False): + + if fine_tune_checkpoint_type == 'classification': + return {'feature_extractor': self._feature_extractor.get_base_model()} + + if fine_tune_checkpoint_type == 'detection': + return {'feature_extractor': self._feature_extractor.get_model()} + + else: + raise ValueError('Unknown fine tune checkpoint type - {}'.format( + fine_tune_checkpoint_type)) + + def updates(self): + raise RuntimeError('This model is intended to be used with model_lib_v2 ' + 'which does not support updates()') diff --git a/research/object_detection/meta_architectures/center_net_meta_arch_tf2_test.py b/research/object_detection/meta_architectures/center_net_meta_arch_tf2_test.py new file mode 100644 index 000000000..247ffd1bd --- /dev/null +++ b/research/object_detection/meta_architectures/center_net_meta_arch_tf2_test.py @@ -0,0 +1,1683 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Tests for the CenterNet Meta architecture code.""" + +from __future__ import division + +import functools +import unittest +from absl.testing import parameterized +import numpy as np +import tensorflow.compat.v1 as tf + +from object_detection.core import losses +from object_detection.core import preprocessor +from object_detection.core import standard_fields as fields +from object_detection.core import target_assigner as cn_assigner +from object_detection.meta_architectures import center_net_meta_arch as cnma +from object_detection.models import center_net_resnet_feature_extractor +from object_detection.utils import test_case +from object_detection.utils import tf_version + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class CenterNetMetaArchPredictionHeadTest(test_case.TestCase): + """Test CenterNet meta architecture prediction head.""" + + def test_prediction_head(self): + head = cnma.make_prediction_net(num_out_channels=7) + output = head(np.zeros((4, 128, 128, 8))) + + self.assertEqual((4, 128, 128, 7), output.shape) + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class CenterNetMetaArchHelpersTest(test_case.TestCase, parameterized.TestCase): + """Test for CenterNet meta architecture related functions.""" + + def test_row_col_indices_from_flattened_indices(self): + """Tests that the computation of row, col, channel indices is correct.""" + + r_grid, c_grid, ch_grid = (np.zeros((5, 4, 3), dtype=np.int), + np.zeros((5, 4, 3), dtype=np.int), + np.zeros((5, 4, 3), dtype=np.int)) + + r_grid[..., 0] = r_grid[..., 1] = r_grid[..., 2] = np.array( + [[0, 0, 0, 0], + [1, 1, 1, 1], + [2, 2, 2, 2], + [3, 3, 3, 3], + [4, 4, 4, 4]] + ) + + c_grid[..., 0] = c_grid[..., 1] = c_grid[..., 2] = np.array( + [[0, 1, 2, 3], + [0, 1, 2, 3], + [0, 1, 2, 3], + [0, 1, 2, 3], + [0, 1, 2, 3]] + ) + + for i in range(3): + ch_grid[..., i] = i + + indices = np.arange(60) + ri, ci, chi = cnma.row_col_channel_indices_from_flattened_indices( + indices, 4, 3) + + np.testing.assert_array_equal(ri, r_grid.flatten()) + np.testing.assert_array_equal(ci, c_grid.flatten()) + np.testing.assert_array_equal(chi, ch_grid.flatten()) + + def test_flattened_indices_from_row_col_indices(self): + + r = np.array( + [[0, 0, 0, 0], + [1, 1, 1, 1], + [2, 2, 2, 2]] + ) + + c = np.array( + [[0, 1, 2, 3], + [0, 1, 2, 3], + [0, 1, 2, 3]] + ) + + idx = cnma.flattened_indices_from_row_col_indices(r, c, 4) + np.testing.assert_array_equal(np.arange(12), idx.flatten()) + + def test_get_valid_anchor_weights_in_flattened_image(self): + """Tests that the anchor weights are valid upon flattening out.""" + + valid_weights = np.zeros((2, 5, 5), dtype=np.float) + + valid_weights[0, :3, :4] = 1.0 + valid_weights[1, :2, :2] = 1.0 + + def graph_fn(): + true_image_shapes = tf.constant([[3, 4], [2, 2]]) + w = cnma.get_valid_anchor_weights_in_flattened_image( + true_image_shapes, 5, 5) + return w + + w = self.execute(graph_fn, []) + np.testing.assert_allclose(w, valid_weights.reshape(2, -1)) + self.assertEqual((2, 25), w.shape) + + def test_convert_strided_predictions_to_normalized_boxes(self): + """Tests that boxes have correct coordinates in normalized input space.""" + + def graph_fn(): + boxes = np.zeros((2, 3, 4), dtype=np.float32) + + boxes[0] = [[10, 20, 30, 40], [20, 30, 50, 100], [50, 60, 100, 180]] + boxes[1] = [[-5, -5, 5, 5], [45, 60, 110, 120], [150, 150, 200, 250]] + + true_image_shapes = tf.constant([[100, 90, 
3], [150, 150, 3]]) + + clipped_boxes = ( + cnma.convert_strided_predictions_to_normalized_boxes( + boxes, 2, true_image_shapes)) + return clipped_boxes + + clipped_boxes = self.execute(graph_fn, []) + + expected_boxes = np.zeros((2, 3, 4), dtype=np.float32) + expected_boxes[0] = [[0.2, 4./9, 0.6, 8./9], [0.4, 2./3, 1, 1], + [1, 1, 1, 1]] + expected_boxes[1] = [[0., 0, 1./15, 1./15], [3./5, 4./5, 1, 1], + [1, 1, 1, 1]] + + np.testing.assert_allclose(expected_boxes, clipped_boxes) + + @parameterized.parameters( + {'clip_to_window': True}, + {'clip_to_window': False} + ) + def test_convert_strided_predictions_to_normalized_keypoints( + self, clip_to_window): + """Tests that keypoints have correct coordinates in normalized coords.""" + + keypoint_coords_np = np.array( + [ + # Example 0. + [ + [[-10., 8.], [60., 22.], [60., 120.]], + [[20., 20.], [0., 0.], [0., 0.]], + ], + # Example 1. + [ + [[40., 50.], [20., 160.], [200., 150.]], + [[10., 0.], [40., 10.], [0., 0.]], + ], + ], dtype=np.float32) + keypoint_scores_np = np.array( + [ + # Example 0. + [ + [1.0, 0.9, 0.2], + [0.7, 0.0, 0.0], + ], + # Example 1. + [ + [1.0, 1.0, 0.2], + [0.7, 0.6, 0.0], + ], + ], dtype=np.float32) + + def graph_fn(): + keypoint_coords = tf.constant(keypoint_coords_np, dtype=tf.float32) + keypoint_scores = tf.constant(keypoint_scores_np, dtype=tf.float32) + true_image_shapes = tf.constant([[320, 400, 3], [640, 640, 3]]) + stride = 4 + + keypoint_coords_out, keypoint_scores_out = ( + cnma.convert_strided_predictions_to_normalized_keypoints( + keypoint_coords, keypoint_scores, stride, true_image_shapes, + clip_to_window)) + return keypoint_coords_out, keypoint_scores_out + + keypoint_coords_out, keypoint_scores_out = self.execute(graph_fn, []) + + if clip_to_window: + expected_keypoint_coords_np = np.array( + [ + # Example 0. + [ + [[0.0, 0.08], [0.75, 0.22], [0.75, 1.0]], + [[0.25, 0.2], [0., 0.], [0.0, 0.0]], + ], + # Example 1. + [ + [[0.25, 0.3125], [0.125, 1.0], [1.0, 0.9375]], + [[0.0625, 0.], [0.25, 0.0625], [0., 0.]], + ], + ], dtype=np.float32) + expected_keypoint_scores_np = np.array( + [ + # Example 0. + [ + [0.0, 0.9, 0.0], + [0.7, 0.0, 0.0], + ], + # Example 1. + [ + [1.0, 1.0, 0.0], + [0.7, 0.6, 0.0], + ], + ], dtype=np.float32) + else: + expected_keypoint_coords_np = np.array( + [ + # Example 0. + [ + [[-0.125, 0.08], [0.75, 0.22], [0.75, 1.2]], + [[0.25, 0.2], [0., 0.], [0., 0.]], + ], + # Example 1. + [ + [[0.25, 0.3125], [0.125, 1.0], [1.25, 0.9375]], + [[0.0625, 0.], [0.25, 0.0625], [0., 0.]], + ], + ], dtype=np.float32) + expected_keypoint_scores_np = np.array( + [ + # Example 0. + [ + [1.0, 0.9, 0.2], + [0.7, 0.0, 0.0], + ], + # Example 1. + [ + [1.0, 1.0, 0.2], + [0.7, 0.6, 0.0], + ], + ], dtype=np.float32) + np.testing.assert_allclose(expected_keypoint_coords_np, keypoint_coords_out) + np.testing.assert_allclose(expected_keypoint_scores_np, keypoint_scores_out) + + def test_convert_strided_predictions_to_instance_masks(self): + + def graph_fn(): + boxes = tf.constant( + [ + [[0.5, 0.5, 1.0, 1.0], + [0.0, 0.5, 0.5, 1.0], + [0.0, 0.0, 0.0, 0.0]], + ], tf.float32) + classes = tf.constant( + [ + [0, 1, 0], + ], tf.int32) + masks_np = np.zeros((1, 4, 4, 2), dtype=np.float32) + masks_np[0, :, 2:, 0] = 1 # Class 0. + masks_np[0, :, :3, 1] = 1 # Class 1. 
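+      # Each detection box is cropped out of its predicted class channel,
+      # resized to the 2x2 instance mask shape, and binarized; the expected
+      # masks below follow directly from those crops.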
+ masks = tf.constant(masks_np) + true_image_shapes = tf.constant([[6, 8, 3]]) + instance_masks = cnma.convert_strided_predictions_to_instance_masks( + boxes, classes, masks, stride=2, mask_height=2, mask_width=2, + true_image_shapes=true_image_shapes) + return instance_masks + + instance_masks = self.execute_cpu(graph_fn, []) + + expected_instance_masks = np.array( + [ + [ + # Mask 0 (class 0). + [[1, 1], + [1, 1]], + # Mask 1 (class 1). + [[1, 0], + [1, 0]], + # Mask 2 (class 0). + [[0, 0], + [0, 0]], + ] + ]) + np.testing.assert_array_equal(expected_instance_masks, instance_masks) + + def test_top_k_feature_map_locations(self): + feature_map_np = np.zeros((2, 3, 3, 2), dtype=np.float32) + feature_map_np[0, 2, 0, 1] = 1.0 + feature_map_np[0, 2, 1, 1] = 0.9 # Get's filtered due to max pool. + feature_map_np[0, 0, 1, 0] = 0.7 + feature_map_np[0, 2, 2, 0] = 0.5 + feature_map_np[0, 2, 2, 1] = -0.3 + feature_map_np[1, 2, 1, 1] = 0.7 + feature_map_np[1, 1, 0, 0] = 0.4 + feature_map_np[1, 1, 2, 0] = 0.1 + + def graph_fn(): + feature_map = tf.constant(feature_map_np) + scores, y_inds, x_inds, channel_inds = ( + cnma.top_k_feature_map_locations( + feature_map, max_pool_kernel_size=3, k=3)) + return scores, y_inds, x_inds, channel_inds + + scores, y_inds, x_inds, channel_inds = self.execute(graph_fn, []) + + np.testing.assert_allclose([1.0, 0.7, 0.5], scores[0]) + np.testing.assert_array_equal([2, 0, 2], y_inds[0]) + np.testing.assert_array_equal([0, 1, 2], x_inds[0]) + np.testing.assert_array_equal([1, 0, 0], channel_inds[0]) + + np.testing.assert_allclose([0.7, 0.4, 0.1], scores[1]) + np.testing.assert_array_equal([2, 1, 1], y_inds[1]) + np.testing.assert_array_equal([1, 0, 2], x_inds[1]) + np.testing.assert_array_equal([1, 0, 0], channel_inds[1]) + + def test_top_k_feature_map_locations_no_pooling(self): + feature_map_np = np.zeros((2, 3, 3, 2), dtype=np.float32) + feature_map_np[0, 2, 0, 1] = 1.0 + feature_map_np[0, 2, 1, 1] = 0.9 + feature_map_np[0, 0, 1, 0] = 0.7 + feature_map_np[0, 2, 2, 0] = 0.5 + feature_map_np[0, 2, 2, 1] = -0.3 + feature_map_np[1, 2, 1, 1] = 0.7 + feature_map_np[1, 1, 0, 0] = 0.4 + feature_map_np[1, 1, 2, 0] = 0.1 + + def graph_fn(): + feature_map = tf.constant(feature_map_np) + scores, y_inds, x_inds, channel_inds = ( + cnma.top_k_feature_map_locations( + feature_map, max_pool_kernel_size=1, k=3)) + return scores, y_inds, x_inds, channel_inds + + scores, y_inds, x_inds, channel_inds = self.execute(graph_fn, []) + + np.testing.assert_allclose([1.0, 0.9, 0.7], scores[0]) + np.testing.assert_array_equal([2, 2, 0], y_inds[0]) + np.testing.assert_array_equal([0, 1, 1], x_inds[0]) + np.testing.assert_array_equal([1, 1, 0], channel_inds[0]) + + np.testing.assert_allclose([0.7, 0.4, 0.1], scores[1]) + np.testing.assert_array_equal([2, 1, 1], y_inds[1]) + np.testing.assert_array_equal([1, 0, 2], x_inds[1]) + np.testing.assert_array_equal([1, 0, 0], channel_inds[1]) + + def test_top_k_feature_map_locations_per_channel(self): + feature_map_np = np.zeros((2, 3, 3, 2), dtype=np.float32) + feature_map_np[0, 2, 0, 0] = 1.0 # Selected. + feature_map_np[0, 2, 1, 0] = 0.9 # Get's filtered due to max pool. + feature_map_np[0, 0, 1, 0] = 0.7 # Selected. + feature_map_np[0, 2, 2, 1] = 0.5 # Selected. + feature_map_np[0, 0, 0, 1] = 0.3 # Selected. + feature_map_np[1, 2, 1, 0] = 0.7 # Selected. + feature_map_np[1, 1, 0, 0] = 0.4 # Get's filtered due to max pool. + feature_map_np[1, 1, 2, 0] = 0.3 # Get's filtered due to max pool. + feature_map_np[1, 1, 0, 1] = 0.8 # Selected. 
+ feature_map_np[1, 1, 2, 1] = 0.3 # Selected. + + def graph_fn(): + feature_map = tf.constant(feature_map_np) + scores, y_inds, x_inds, channel_inds = ( + cnma.top_k_feature_map_locations( + feature_map, max_pool_kernel_size=3, k=2, per_channel=True)) + return scores, y_inds, x_inds, channel_inds + + scores, y_inds, x_inds, channel_inds = self.execute(graph_fn, []) + + np.testing.assert_allclose([1.0, 0.7, 0.5, 0.3], scores[0]) + np.testing.assert_array_equal([2, 0, 2, 0], y_inds[0]) + np.testing.assert_array_equal([0, 1, 2, 0], x_inds[0]) + np.testing.assert_array_equal([0, 0, 1, 1], channel_inds[0]) + + np.testing.assert_allclose([0.7, 0.0, 0.8, 0.3], scores[1]) + np.testing.assert_array_equal([2, 0, 1, 1], y_inds[1]) + np.testing.assert_array_equal([1, 0, 0, 2], x_inds[1]) + np.testing.assert_array_equal([0, 0, 1, 1], channel_inds[1]) + + def test_box_prediction(self): + + class_pred = np.zeros((3, 128, 128, 5), dtype=np.float32) + hw_pred = np.zeros((3, 128, 128, 2), dtype=np.float32) + offset_pred = np.zeros((3, 128, 128, 2), dtype=np.float32) + + # Sample 1, 2 boxes + class_pred[0, 10, 20] = [0.3, .7, 0.0, 0.0, 0.0] + hw_pred[0, 10, 20] = [40, 60] + offset_pred[0, 10, 20] = [1, 2] + + class_pred[0, 50, 60] = [0.55, 0.0, 0.0, 0.0, 0.45] + hw_pred[0, 50, 60] = [50, 50] + offset_pred[0, 50, 60] = [0, 0] + + # Sample 2, 2 boxes (at same location) + class_pred[1, 100, 100] = [0.0, 0.1, 0.9, 0.0, 0.0] + hw_pred[1, 100, 100] = [10, 10] + offset_pred[1, 100, 100] = [1, 3] + + # Sample 3, 3 boxes + class_pred[2, 60, 90] = [0.0, 0.0, 0.0, 0.2, 0.8] + hw_pred[2, 60, 90] = [40, 30] + offset_pred[2, 60, 90] = [0, 0] + + class_pred[2, 65, 95] = [0.0, 0.7, 0.3, 0.0, 0.0] + hw_pred[2, 65, 95] = [20, 20] + offset_pred[2, 65, 95] = [1, 2] + + class_pred[2, 75, 85] = [1.0, 0.0, 0.0, 0.0, 0.0] + hw_pred[2, 75, 85] = [21, 25] + offset_pred[2, 75, 85] = [5, 2] + + def graph_fn(): + class_pred_tensor = tf.constant(class_pred) + hw_pred_tensor = tf.constant(hw_pred) + offset_pred_tensor = tf.constant(offset_pred) + + detection_scores, y_indices, x_indices, channel_indices = ( + cnma.top_k_feature_map_locations( + class_pred_tensor, max_pool_kernel_size=3, k=2)) + + boxes, classes, scores, num_dets = cnma.prediction_tensors_to_boxes( + detection_scores, y_indices, x_indices, channel_indices, + hw_pred_tensor, offset_pred_tensor) + return boxes, classes, scores, num_dets + + boxes, classes, scores, num_dets = self.execute(graph_fn, []) + + np.testing.assert_array_equal(num_dets, [2, 2, 2]) + + np.testing.assert_allclose( + [[-9, -8, 31, 52], [25, 35, 75, 85]], boxes[0]) + np.testing.assert_allclose( + [[96, 98, 106, 108], [96, 98, 106, 108]], boxes[1]) + np.testing.assert_allclose( + [[69.5, 74.5, 90.5, 99.5], [40, 75, 80, 105]], boxes[2]) + + np.testing.assert_array_equal(classes[0], [1, 0]) + np.testing.assert_array_equal(classes[1], [2, 1]) + np.testing.assert_array_equal(classes[2], [0, 4]) + + np.testing.assert_allclose(scores[0], [.7, .55]) + np.testing.assert_allclose(scores[1][:1], [.9]) + np.testing.assert_allclose(scores[2], [1., .8]) + + def test_keypoint_candidate_prediction(self): + keypoint_heatmap_np = np.zeros((2, 3, 3, 2), dtype=np.float32) + keypoint_heatmap_np[0, 0, 0, 0] = 1.0 + keypoint_heatmap_np[0, 2, 1, 0] = 0.7 + keypoint_heatmap_np[0, 1, 1, 0] = 0.6 + keypoint_heatmap_np[0, 0, 2, 1] = 0.7 + keypoint_heatmap_np[0, 1, 1, 1] = 0.3 # Filtered by low score. 
+ keypoint_heatmap_np[0, 2, 2, 1] = 0.2 + keypoint_heatmap_np[1, 1, 0, 0] = 0.6 + keypoint_heatmap_np[1, 2, 1, 0] = 0.5 + keypoint_heatmap_np[1, 0, 0, 0] = 0.4 + keypoint_heatmap_np[1, 0, 0, 1] = 1.0 + keypoint_heatmap_np[1, 0, 1, 1] = 0.9 + keypoint_heatmap_np[1, 2, 0, 1] = 0.8 + + keypoint_heatmap_offsets_np = np.zeros((2, 3, 3, 2), dtype=np.float32) + keypoint_heatmap_offsets_np[0, 0, 0] = [0.5, 0.25] + keypoint_heatmap_offsets_np[0, 2, 1] = [-0.25, 0.5] + keypoint_heatmap_offsets_np[0, 1, 1] = [0.0, 0.0] + keypoint_heatmap_offsets_np[0, 0, 2] = [1.0, 0.0] + keypoint_heatmap_offsets_np[0, 2, 2] = [1.0, 1.0] + keypoint_heatmap_offsets_np[1, 1, 0] = [0.25, 0.5] + keypoint_heatmap_offsets_np[1, 2, 1] = [0.5, 0.0] + keypoint_heatmap_offsets_np[1, 0, 0] = [0.0, -0.5] + keypoint_heatmap_offsets_np[1, 0, 1] = [0.5, -0.5] + keypoint_heatmap_offsets_np[1, 2, 0] = [-1.0, -0.5] + + def graph_fn(): + keypoint_heatmap = tf.constant(keypoint_heatmap_np, dtype=tf.float32) + keypoint_heatmap_offsets = tf.constant( + keypoint_heatmap_offsets_np, dtype=tf.float32) + + keypoint_cands, keypoint_scores, num_keypoint_candidates = ( + cnma.prediction_tensors_to_keypoint_candidates( + keypoint_heatmap, + keypoint_heatmap_offsets, + keypoint_score_threshold=0.5, + max_pool_kernel_size=1, + max_candidates=2)) + return keypoint_cands, keypoint_scores, num_keypoint_candidates + + (keypoint_cands, keypoint_scores, + num_keypoint_candidates) = self.execute(graph_fn, []) + + expected_keypoint_candidates = [ + [ # Example 0. + [[0.5, 0.25], [1.0, 2.0]], # Keypoint 1. + [[1.75, 1.5], [1.0, 1.0]], # Keypoint 2. + ], + [ # Example 1. + [[1.25, 0.5], [0.0, -0.5]], # Keypoint 1. + [[2.5, 1.0], [0.5, 0.5]], # Keypoint 2. + ], + ] + expected_keypoint_scores = [ + [ # Example 0. + [1.0, 0.7], # Keypoint 1. + [0.7, 0.3], # Keypoint 2. + ], + [ # Example 1. + [0.6, 1.0], # Keypoint 1. + [0.5, 0.9], # Keypoint 2. + ], + ] + expected_num_keypoint_candidates = [ + [2, 1], + [2, 2] + ] + np.testing.assert_allclose(expected_keypoint_candidates, keypoint_cands) + np.testing.assert_allclose(expected_keypoint_scores, keypoint_scores) + np.testing.assert_array_equal(expected_num_keypoint_candidates, + num_keypoint_candidates) + + def test_keypoint_candidate_prediction_per_keypoints(self): + keypoint_heatmap_np = np.zeros((2, 3, 3, 2), dtype=np.float32) + keypoint_heatmap_np[0, 0, 0, 0] = 1.0 + keypoint_heatmap_np[0, 2, 1, 0] = 0.7 + keypoint_heatmap_np[0, 1, 1, 0] = 0.6 + keypoint_heatmap_np[0, 0, 2, 1] = 0.7 + keypoint_heatmap_np[0, 1, 1, 1] = 0.3 # Filtered by low score. 
+ keypoint_heatmap_np[0, 2, 2, 1] = 0.2 + keypoint_heatmap_np[1, 1, 0, 0] = 0.6 + keypoint_heatmap_np[1, 2, 1, 0] = 0.5 + keypoint_heatmap_np[1, 0, 0, 0] = 0.4 + keypoint_heatmap_np[1, 0, 0, 1] = 1.0 + keypoint_heatmap_np[1, 0, 1, 1] = 0.9 + keypoint_heatmap_np[1, 2, 0, 1] = 0.8 + + keypoint_heatmap_offsets_np = np.zeros((2, 3, 3, 4), dtype=np.float32) + keypoint_heatmap_offsets_np[0, 0, 0] = [0.5, 0.25, 0.0, 0.0] + keypoint_heatmap_offsets_np[0, 2, 1] = [-0.25, 0.5, 0.0, 0.0] + keypoint_heatmap_offsets_np[0, 1, 1] = [0.0, 0.0, 0.0, 0.0] + keypoint_heatmap_offsets_np[0, 0, 2] = [0.0, 0.0, 1.0, 0.0] + keypoint_heatmap_offsets_np[0, 2, 2] = [0.0, 0.0, 1.0, 1.0] + keypoint_heatmap_offsets_np[1, 1, 0] = [0.25, 0.5, 0.0, 0.0] + keypoint_heatmap_offsets_np[1, 2, 1] = [0.5, 0.0, 0.0, 0.0] + keypoint_heatmap_offsets_np[1, 0, 0] = [0.0, 0.0, 0.0, -0.5] + keypoint_heatmap_offsets_np[1, 0, 1] = [0.0, 0.0, 0.5, -0.5] + keypoint_heatmap_offsets_np[1, 2, 0] = [0.0, 0.0, -1.0, -0.5] + + def graph_fn(): + keypoint_heatmap = tf.constant(keypoint_heatmap_np, dtype=tf.float32) + keypoint_heatmap_offsets = tf.constant( + keypoint_heatmap_offsets_np, dtype=tf.float32) + + keypoint_cands, keypoint_scores, num_keypoint_candidates = ( + cnma.prediction_tensors_to_keypoint_candidates( + keypoint_heatmap, + keypoint_heatmap_offsets, + keypoint_score_threshold=0.5, + max_pool_kernel_size=1, + max_candidates=2)) + return keypoint_cands, keypoint_scores, num_keypoint_candidates + + (keypoint_cands, keypoint_scores, + num_keypoint_candidates) = self.execute(graph_fn, []) + + expected_keypoint_candidates = [ + [ # Example 0. + [[0.5, 0.25], [1.0, 2.0]], # Candidate 1 of keypoint 1, 2. + [[1.75, 1.5], [1.0, 1.0]], # Candidate 2 of keypoint 1, 2. + ], + [ # Example 1. + [[1.25, 0.5], [0.0, -0.5]], # Candidate 1 of keypoint 1, 2. + [[2.5, 1.0], [0.5, 0.5]], # Candidate 2 of keypoint 1, 2. + ], + ] + expected_keypoint_scores = [ + [ # Example 0. + [1.0, 0.7], # Candidate 1 scores of keypoint 1, 2. + [0.7, 0.3], # Candidate 2 scores of keypoint 1, 2. + ], + [ # Example 1. + [0.6, 1.0], # Candidate 1 scores of keypoint 1, 2. + [0.5, 0.9], # Candidate 2 scores of keypoint 1, 2. 
+ ], + ] + expected_num_keypoint_candidates = [ + [2, 1], + [2, 2] + ] + np.testing.assert_allclose(expected_keypoint_candidates, keypoint_cands) + np.testing.assert_allclose(expected_keypoint_scores, keypoint_scores) + np.testing.assert_array_equal(expected_num_keypoint_candidates, + num_keypoint_candidates) + + def test_regressed_keypoints_at_object_centers(self): + batch_size = 2 + num_keypoints = 5 + num_instances = 6 + regressed_keypoint_feature_map_np = np.random.randn( + batch_size, 10, 10, 2 * num_keypoints).astype(np.float32) + y_indices = np.random.choice(10, (batch_size, num_instances)) + x_indices = np.random.choice(10, (batch_size, num_instances)) + offsets = np.stack([y_indices, x_indices], axis=2).astype(np.float32) + + def graph_fn(): + regressed_keypoint_feature_map = tf.constant( + regressed_keypoint_feature_map_np, dtype=tf.float32) + + gathered_regressed_keypoints = ( + cnma.regressed_keypoints_at_object_centers( + regressed_keypoint_feature_map, + tf.constant(y_indices, dtype=tf.int32), + tf.constant(x_indices, dtype=tf.int32))) + return gathered_regressed_keypoints + + gathered_regressed_keypoints = self.execute(graph_fn, []) + + expected_gathered_keypoints_0 = regressed_keypoint_feature_map_np[ + 0, y_indices[0], x_indices[0], :] + expected_gathered_keypoints_1 = regressed_keypoint_feature_map_np[ + 1, y_indices[1], x_indices[1], :] + expected_gathered_keypoints = np.stack([ + expected_gathered_keypoints_0, + expected_gathered_keypoints_1], axis=0) + expected_gathered_keypoints = np.reshape( + expected_gathered_keypoints, + [batch_size, num_instances, num_keypoints, 2]) + expected_gathered_keypoints += np.expand_dims(offsets, axis=2) + expected_gathered_keypoints = np.reshape( + expected_gathered_keypoints, + [batch_size, num_instances, -1]) + np.testing.assert_allclose(expected_gathered_keypoints, + gathered_regressed_keypoints) + + @parameterized.parameters( + {'candidate_ranking_mode': 'min_distance'}, + {'candidate_ranking_mode': 'score_distance_ratio'}, + ) + def test_refine_keypoints(self, candidate_ranking_mode): + regressed_keypoints_np = np.array( + [ + # Example 0. + [ + [[2.0, 2.0], [6.0, 10.0], [14.0, 7.0]], # Instance 0. + [[0.0, 6.0], [3.0, 3.0], [5.0, 7.0]], # Instance 1. + ], + # Example 1. + [ + [[6.0, 2.0], [0.0, 0.0], [0.1, 0.1]], # Instance 0. + [[6.0, 2.5], [5.0, 5.0], [9.0, 3.0]], # Instance 1. + ], + ], dtype=np.float32) + keypoint_candidates_np = np.array( + [ + # Example 0. + [ + [[2.0, 2.5], [6.0, 10.5], [4.0, 7.0]], # Candidate 0. + [[1.0, 8.0], [0.0, 0.0], [2.0, 2.0]], # Candidate 1. + [[0.0, 0.0], [0.0, 0.0], [0.0, 0.0]], # Candidate 2. + ], + # Example 1. + [ + [[6.0, 1.5], [0.1, 0.4], [0.0, 0.0]], # Candidate 0. + [[1.0, 4.0], [0.0, 0.3], [0.0, 0.0]], # Candidate 1. + [[0.0, 0.0], [0.1, 0.3], [0.0, 0.0]], # Candidate 2. + ] + ], dtype=np.float32) + keypoint_scores_np = np.array( + [ + # Example 0. + [ + [0.8, 0.9, 1.0], # Candidate 0. + [0.6, 0.1, 0.9], # Candidate 1. + [0.0, 0.0, 0.0], # Candidate 1. + ], + # Example 1. + [ + [0.7, 0.3, 0.0], # Candidate 0. + [0.6, 0.1, 0.0], # Candidate 1. + [0.0, 0.28, 0.0], # Candidate 1. + ] + ], dtype=np.float32) + num_keypoints_candidates_np = np.array( + [ + # Example 0. + [2, 2, 2], + # Example 1. 
+ [2, 3, 0], + ], dtype=np.int32) + unmatched_keypoint_score = 0.1 + + def graph_fn(): + regressed_keypoints = tf.constant( + regressed_keypoints_np, dtype=tf.float32) + keypoint_candidates = tf.constant( + keypoint_candidates_np, dtype=tf.float32) + keypoint_scores = tf.constant(keypoint_scores_np, dtype=tf.float32) + num_keypoint_candidates = tf.constant(num_keypoints_candidates_np, + dtype=tf.int32) + refined_keypoints, refined_scores = cnma.refine_keypoints( + regressed_keypoints, keypoint_candidates, keypoint_scores, + num_keypoint_candidates, bboxes=None, + unmatched_keypoint_score=unmatched_keypoint_score, + box_scale=1.2, candidate_search_scale=0.3, + candidate_ranking_mode=candidate_ranking_mode) + return refined_keypoints, refined_scores + + refined_keypoints, refined_scores = self.execute(graph_fn, []) + + if candidate_ranking_mode == 'min_distance': + expected_refined_keypoints = np.array( + [ + # Example 0. + [ + [[2.0, 2.5], [6.0, 10.5], [14.0, 7.0]], # Instance 0. + [[0.0, 6.0], [3.0, 3.0], [4.0, 7.0]], # Instance 1. + ], + # Example 1. + [ + [[6.0, 1.5], [0.0, 0.3], [0.1, 0.1]], # Instance 0. + [[6.0, 2.5], [5.0, 5.0], [9.0, 3.0]], # Instance 1. + ], + ], dtype=np.float32) + expected_refined_scores = np.array( + [ + # Example 0. + [ + [0.8, 0.9, unmatched_keypoint_score], # Instance 0. + [unmatched_keypoint_score, # Instance 1. + unmatched_keypoint_score, 1.0], + ], + # Example 1. + [ + [0.7, 0.1, unmatched_keypoint_score], # Instance 0. + [unmatched_keypoint_score, # Instance 1. + 0.1, unmatched_keypoint_score], + ], + ], dtype=np.float32) + else: + expected_refined_keypoints = np.array( + [ + # Example 0. + [ + [[2.0, 2.5], [6.0, 10.5], [14.0, 7.0]], # Instance 0. + [[0.0, 6.0], [3.0, 3.0], [4.0, 7.0]], # Instance 1. + ], + # Example 1. + [ + [[6.0, 1.5], [0.1, 0.3], [0.1, 0.1]], # Instance 0. + [[6.0, 2.5], [5.0, 5.0], [9.0, 3.0]], # Instance 1. + ], + ], dtype=np.float32) + expected_refined_scores = np.array( + [ + # Example 0. + [ + [0.8, 0.9, unmatched_keypoint_score], # Instance 0. + [unmatched_keypoint_score, # Instance 1. + unmatched_keypoint_score, 1.0], + ], + # Example 1. + [ + [0.7, 0.28, unmatched_keypoint_score], # Instance 0. + [unmatched_keypoint_score, # Instance 1. + 0.1, unmatched_keypoint_score], + ], + ], dtype=np.float32) + + np.testing.assert_allclose(expected_refined_keypoints, refined_keypoints) + np.testing.assert_allclose(expected_refined_scores, refined_scores) + + def test_refine_keypoints_with_bboxes(self): + regressed_keypoints_np = np.array( + [ + # Example 0. + [ + [[2.0, 2.0], [6.0, 10.0], [14.0, 7.0]], # Instance 0. + [[0.0, 6.0], [3.0, 3.0], [5.0, 7.0]], # Instance 1. + ], + # Example 1. + [ + [[6.0, 2.0], [0.0, 0.0], [0.1, 0.1]], # Instance 0. + [[6.0, 2.5], [5.0, 5.0], [9.0, 3.0]], # Instance 1. + ], + ], dtype=np.float32) + keypoint_candidates_np = np.array( + [ + # Example 0. + [ + [[2.0, 2.5], [6.0, 10.5], [4.0, 7.0]], # Candidate 0. + [[1.0, 8.0], [0.0, 0.0], [2.0, 2.0]], # Candidate 1. + ], + # Example 1. + [ + [[6.0, 1.5], [5.0, 5.0], [0.0, 0.0]], # Candidate 0. + [[1.0, 4.0], [0.0, 0.3], [0.0, 0.0]], # Candidate 1. + ] + ], dtype=np.float32) + keypoint_scores_np = np.array( + [ + # Example 0. + [ + [0.8, 0.9, 1.0], # Candidate 0. + [0.6, 0.1, 0.9], # Candidate 1. + ], + # Example 1. + [ + [0.7, 0.4, 0.0], # Candidate 0. + [0.6, 0.1, 0.0], # Candidate 1. + ] + ], dtype=np.float32) + num_keypoints_candidates_np = np.array( + [ + # Example 0. + [2, 2, 2], + # Example 1. 
+ [2, 2, 0], + ], dtype=np.int32) + bboxes_np = np.array( + [ + # Example 0. + [ + [2.0, 2.0, 14.0, 10.0], # Instance 0. + [0.0, 3.0, 5.0, 7.0], # Instance 1. + ], + # Example 1. + [ + [0.0, 0.0, 6.0, 2.0], # Instance 0. + [5.0, 1.4, 9.0, 5.0], # Instance 1. + ], + ], dtype=np.float32) + unmatched_keypoint_score = 0.1 + + def graph_fn(): + regressed_keypoints = tf.constant( + regressed_keypoints_np, dtype=tf.float32) + keypoint_candidates = tf.constant( + keypoint_candidates_np, dtype=tf.float32) + keypoint_scores = tf.constant(keypoint_scores_np, dtype=tf.float32) + num_keypoint_candidates = tf.constant(num_keypoints_candidates_np, + dtype=tf.int32) + bboxes = tf.constant(bboxes_np, dtype=tf.float32) + refined_keypoints, refined_scores = cnma.refine_keypoints( + regressed_keypoints, keypoint_candidates, keypoint_scores, + num_keypoint_candidates, bboxes=bboxes, + unmatched_keypoint_score=unmatched_keypoint_score, + box_scale=1.0, candidate_search_scale=0.3) + return refined_keypoints, refined_scores + + refined_keypoints, refined_scores = self.execute(graph_fn, []) + + expected_refined_keypoints = np.array( + [ + # Example 0. + [ + [[2.0, 2.5], [6.0, 10.0], [14.0, 7.0]], # Instance 0. + [[0.0, 6.0], [3.0, 3.0], [4.0, 7.0]], # Instance 1. + ], + # Example 1. + [ + [[6.0, 1.5], [0.0, 0.3], [0.1, 0.1]], # Instance 0. + [[6.0, 1.5], [5.0, 5.0], [9.0, 3.0]], # Instance 1. + ], + ], dtype=np.float32) + expected_refined_scores = np.array( + [ + # Example 0. + [ + [0.8, unmatched_keypoint_score, # Instance 0. + unmatched_keypoint_score], + [unmatched_keypoint_score, # Instance 1. + unmatched_keypoint_score, 1.0], + ], + # Example 1. + [ + [0.7, 0.1, unmatched_keypoint_score], # Instance 0. + [0.7, 0.4, unmatched_keypoint_score], # Instance 1. + ], + ], dtype=np.float32) + + np.testing.assert_allclose(expected_refined_keypoints, refined_keypoints) + np.testing.assert_allclose(expected_refined_scores, refined_scores) + + def test_pad_to_full_keypoint_dim(self): + batch_size = 4 + num_instances = 8 + num_keypoints = 2 + keypoint_inds = [1, 3] + num_total_keypoints = 5 + + kpt_coords_np = np.random.randn(batch_size, num_instances, num_keypoints, 2) + kpt_scores_np = np.random.randn(batch_size, num_instances, num_keypoints) + + def graph_fn(): + kpt_coords = tf.constant(kpt_coords_np) + kpt_scores = tf.constant(kpt_scores_np) + kpt_coords_padded, kpt_scores_padded = ( + cnma._pad_to_full_keypoint_dim( + kpt_coords, kpt_scores, keypoint_inds, num_total_keypoints)) + return kpt_coords_padded, kpt_scores_padded + + kpt_coords_padded, kpt_scores_padded = self.execute(graph_fn, []) + + self.assertAllEqual([batch_size, num_instances, num_total_keypoints, 2], + kpt_coords_padded.shape) + self.assertAllEqual([batch_size, num_instances, num_total_keypoints], + kpt_scores_padded.shape) + + for i, kpt_ind in enumerate(keypoint_inds): + np.testing.assert_allclose(kpt_coords_np[:, :, i, :], + kpt_coords_padded[:, :, kpt_ind, :]) + np.testing.assert_allclose(kpt_scores_np[:, :, i], + kpt_scores_padded[:, :, kpt_ind]) + + def test_pad_to_full_instance_dim(self): + batch_size = 4 + max_instances = 8 + num_keypoints = 6 + num_instances = 2 + instance_inds = [1, 3] + + kpt_coords_np = np.random.randn(batch_size, num_instances, num_keypoints, 2) + kpt_scores_np = np.random.randn(batch_size, num_instances, num_keypoints) + + def graph_fn(): + kpt_coords = tf.constant(kpt_coords_np) + kpt_scores = tf.constant(kpt_scores_np) + kpt_coords_padded, kpt_scores_padded = ( + cnma._pad_to_full_instance_dim( + kpt_coords, 
kpt_scores, instance_inds, max_instances)) + return kpt_coords_padded, kpt_scores_padded + + kpt_coords_padded, kpt_scores_padded = self.execute(graph_fn, []) + + self.assertAllEqual([batch_size, max_instances, num_keypoints, 2], + kpt_coords_padded.shape) + self.assertAllEqual([batch_size, max_instances, num_keypoints], + kpt_scores_padded.shape) + + for i, inst_ind in enumerate(instance_inds): + np.testing.assert_allclose(kpt_coords_np[:, i, :, :], + kpt_coords_padded[:, inst_ind, :, :]) + np.testing.assert_allclose(kpt_scores_np[:, i, :], + kpt_scores_padded[:, inst_ind, :]) + + +# Common parameters for setting up testing examples across tests. +_NUM_CLASSES = 10 +_KEYPOINT_INDICES = [0, 1, 2, 3] +_NUM_KEYPOINTS = len(_KEYPOINT_INDICES) +_TASK_NAME = 'human_pose' + + +def get_fake_center_params(): + """Returns the fake object center parameter namedtuple.""" + return cnma.ObjectCenterParams( + classification_loss=losses.WeightedSigmoidClassificationLoss(), + object_center_loss_weight=1.0, + min_box_overlap_iou=1.0, + max_box_predictions=5, + use_labeled_classes=False) + + +def get_fake_od_params(): + """Returns the fake object detection parameter namedtuple.""" + return cnma.ObjectDetectionParams( + localization_loss=losses.L1LocalizationLoss(), + offset_loss_weight=1.0, + scale_loss_weight=0.1) + + +def get_fake_kp_params(): + """Returns the fake keypoint estimation parameter namedtuple.""" + return cnma.KeypointEstimationParams( + task_name=_TASK_NAME, + class_id=1, + keypoint_indices=_KEYPOINT_INDICES, + keypoint_std_dev=[0.00001] * len(_KEYPOINT_INDICES), + classification_loss=losses.WeightedSigmoidClassificationLoss(), + localization_loss=losses.L1LocalizationLoss(), + keypoint_candidate_score_threshold=0.1) + + +def get_fake_mask_params(): + """Returns the fake mask estimation parameter namedtuple.""" + return cnma.MaskParams( + classification_loss=losses.WeightedSoftmaxClassificationLoss(), + task_loss_weight=1.0, + mask_height=4, + mask_width=4) + + +def build_center_net_meta_arch(build_resnet=False): + """Builds the CenterNet meta architecture.""" + if build_resnet: + feature_extractor = ( + center_net_resnet_feature_extractor.CenterNetResnetFeatureExtractor( + 'resnet_v2_101')) + else: + feature_extractor = DummyFeatureExtractor( + channel_means=(1.0, 2.0, 3.0), + channel_stds=(10., 20., 30.), + bgr_ordering=False, + num_feature_outputs=2, + stride=4) + image_resizer_fn = functools.partial( + preprocessor.resize_to_range, + min_dimension=128, + max_dimension=128, + pad_to_max_dimesnion=True) + return cnma.CenterNetMetaArch( + is_training=True, + add_summaries=False, + num_classes=_NUM_CLASSES, + feature_extractor=feature_extractor, + image_resizer_fn=image_resizer_fn, + object_center_params=get_fake_center_params(), + object_detection_params=get_fake_od_params(), + keypoint_params_dict={_TASK_NAME: get_fake_kp_params()}, + mask_params=get_fake_mask_params()) + + +def _logit(p): + return np.log( + (p + np.finfo(np.float32).eps) / (1 - p + np.finfo(np.float32).eps)) + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class CenterNetMetaArchLibTest(test_case.TestCase): + """Test for CenterNet meta architecture related functions.""" + + def test_get_keypoint_name(self): + self.assertEqual('human_pose/keypoint_offset', + cnma.get_keypoint_name('human_pose', 'keypoint_offset')) + + def test_get_num_instances_from_weights(self): + weight1 = tf.constant([0.0, 0.0, 0.0], dtype=tf.float32) + weight2 = tf.constant([0.5, 0.9, 0.0], dtype=tf.float32) + weight3 = 
tf.constant([0.0, 0.0, 1.0], dtype=tf.float32) + + def graph_fn_1(): + # Total of three elements with non-zero values. + num_instances = cnma.get_num_instances_from_weights( + [weight1, weight2, weight3]) + return num_instances + num_instances = self.execute(graph_fn_1, []) + self.assertAlmostEqual(3, num_instances) + + # No non-zero value in the weights. Return minimum value: 1. + def graph_fn_2(): + # Total of three elements with non-zero values. + num_instances = cnma.get_num_instances_from_weights([weight1, weight1]) + return num_instances + num_instances = self.execute(graph_fn_2, []) + self.assertAlmostEqual(1, num_instances) + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class CenterNetMetaArchTest(test_case.TestCase, parameterized.TestCase): + """Tests for the CenterNet meta architecture.""" + + def test_construct_prediction_heads(self): + model = build_center_net_meta_arch() + fake_feature_map = np.zeros((4, 128, 128, 8)) + + # Check the dictionary contains expected keys and corresponding heads with + # correct dimensions. + # "object center" head: + output = model._prediction_head_dict[cnma.OBJECT_CENTER][-1]( + fake_feature_map) + self.assertEqual((4, 128, 128, _NUM_CLASSES), output.shape) + + # "object scale" (height/width) head: + output = model._prediction_head_dict[cnma.BOX_SCALE][-1](fake_feature_map) + self.assertEqual((4, 128, 128, 2), output.shape) + + # "object offset" head: + output = model._prediction_head_dict[cnma.BOX_OFFSET][-1](fake_feature_map) + self.assertEqual((4, 128, 128, 2), output.shape) + + # "keypoint offset" head: + output = model._prediction_head_dict[ + cnma.get_keypoint_name(_TASK_NAME, cnma.KEYPOINT_OFFSET)][-1]( + fake_feature_map) + self.assertEqual((4, 128, 128, 2), output.shape) + + # "keypoint heatmap" head: + output = model._prediction_head_dict[cnma.get_keypoint_name( + _TASK_NAME, cnma.KEYPOINT_HEATMAP)][-1]( + fake_feature_map) + self.assertEqual((4, 128, 128, _NUM_KEYPOINTS), output.shape) + + # "keypoint regression" head: + output = model._prediction_head_dict[cnma.get_keypoint_name( + _TASK_NAME, cnma.KEYPOINT_REGRESSION)][-1]( + fake_feature_map) + self.assertEqual((4, 128, 128, 2 * _NUM_KEYPOINTS), output.shape) + + # "mask" head: + output = model._prediction_head_dict[cnma.SEGMENTATION_HEATMAP][-1]( + fake_feature_map) + self.assertEqual((4, 128, 128, _NUM_CLASSES), output.shape) + + def test_initialize_target_assigners(self): + model = build_center_net_meta_arch() + assigner_dict = model._initialize_target_assigners( + stride=2, + min_box_overlap_iou=0.7) + + # Check whether the correponding target assigner class is initialized. 
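The two graph functions above pin down the contract that cnma.get_num_instances_from_weights is expected to satisfy: count the entries with non-zero weight across all groundtruth weight tensors, and never return less than 1 so the value is safe to use as a loss normalizer. A plain-numpy sketch of that contract (illustrative only; the helper's name and behavior here are inferred from the assertions, not copied from the implementation):

```
import numpy as np

def num_instances_from_weights(weight_arrays):
  # Count entries with non-zero weight across all arrays; floor at 1 so the
  # result can be used as a denominator when normalizing losses.
  count = sum(int(np.count_nonzero(w)) for w in weight_arrays)
  return max(count, 1)

assert num_instances_from_weights(
    [np.zeros(3), np.array([0.5, 0.9, 0.0]), np.array([0.0, 0.0, 1.0])]) == 3
assert num_instances_from_weights([np.zeros(3), np.zeros(3)]) == 1
```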
+ # object center target assigner: + self.assertIsInstance(assigner_dict[cnma.OBJECT_CENTER], + cn_assigner.CenterNetCenterHeatmapTargetAssigner) + + # object detection target assigner: + self.assertIsInstance(assigner_dict[cnma.DETECTION_TASK], + cn_assigner.CenterNetBoxTargetAssigner) + + # keypoint estimation target assigner: + self.assertIsInstance(assigner_dict[_TASK_NAME], + cn_assigner.CenterNetKeypointTargetAssigner) + + # mask estimation target assigner: + self.assertIsInstance(assigner_dict[cnma.SEGMENTATION_TASK], + cn_assigner.CenterNetMaskTargetAssigner) + + def test_predict(self): + """Test the predict function.""" + + model = build_center_net_meta_arch() + def graph_fn(): + prediction_dict = model.predict(tf.zeros([2, 128, 128, 3]), None) + return prediction_dict + + prediction_dict = self.execute(graph_fn, []) + + self.assertEqual(prediction_dict['preprocessed_inputs'].shape, + (2, 128, 128, 3)) + self.assertEqual(prediction_dict[cnma.OBJECT_CENTER][0].shape, + (2, 32, 32, _NUM_CLASSES)) + self.assertEqual(prediction_dict[cnma.BOX_SCALE][0].shape, + (2, 32, 32, 2)) + self.assertEqual(prediction_dict[cnma.BOX_OFFSET][0].shape, + (2, 32, 32, 2)) + self.assertEqual(prediction_dict[cnma.SEGMENTATION_HEATMAP][0].shape, + (2, 32, 32, _NUM_CLASSES)) + + def test_loss(self): + """Test the loss function.""" + groundtruth_dict = get_fake_groundtruth_dict(16, 32, 4) + model = build_center_net_meta_arch() + model.provide_groundtruth( + groundtruth_boxes_list=groundtruth_dict[fields.BoxListFields.boxes], + groundtruth_weights_list=groundtruth_dict[fields.BoxListFields.weights], + groundtruth_classes_list=groundtruth_dict[fields.BoxListFields.classes], + groundtruth_keypoints_list=groundtruth_dict[ + fields.BoxListFields.keypoints], + groundtruth_masks_list=groundtruth_dict[ + fields.BoxListFields.masks]) + + prediction_dict = get_fake_prediction_dict( + input_height=16, input_width=32, stride=4) + + def graph_fn(): + loss_dict = model.loss(prediction_dict, + tf.constant([[16, 24, 3], [16, 24, 3]])) + return loss_dict + + loss_dict = self.execute(graph_fn, []) + + # The prediction and groundtruth are curated to produce very low loss. 
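The near-zero losses asserted below all come from the same mechanism: get_fake_prediction_dict (defined later in this file) pushes its targets through _logit, so a sigmoid cross-entropy evaluated against the matching groundtruth is essentially zero. A self-contained numpy illustration; the stable cross-entropy formula is an assumption about what WeightedSigmoidClassificationLoss computes, written the way tf.nn.sigmoid_cross_entropy_with_logits defines it:

```
import numpy as np

def _logit(p):
  eps = np.finfo(np.float32).eps
  return np.log((p + eps) / (1 - p + eps))

def sigmoid_cross_entropy(logits, targets):
  # Numerically stable form, matching tf.nn.sigmoid_cross_entropy_with_logits.
  return np.maximum(logits, 0) - logits * targets + np.log1p(np.exp(-np.abs(logits)))

print(sigmoid_cross_entropy(_logit(1.0), 1.0))  # ~1e-7, confident and correct
print(sigmoid_cross_entropy(_logit(0.0), 0.0))  # ~1e-7, confident negative
```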
+ self.assertGreater( + 0.01, loss_dict['%s/%s' % (cnma.LOSS_KEY_PREFIX, cnma.OBJECT_CENTER)]) + self.assertGreater( + 0.01, loss_dict['%s/%s' % (cnma.LOSS_KEY_PREFIX, cnma.BOX_SCALE)]) + self.assertGreater( + 0.01, loss_dict['%s/%s' % (cnma.LOSS_KEY_PREFIX, cnma.BOX_OFFSET)]) + self.assertGreater( + 0.01, + loss_dict['%s/%s' % + (cnma.LOSS_KEY_PREFIX, + cnma.get_keypoint_name(_TASK_NAME, cnma.KEYPOINT_HEATMAP))]) + self.assertGreater( + 0.01, + loss_dict['%s/%s' % + (cnma.LOSS_KEY_PREFIX, + cnma.get_keypoint_name(_TASK_NAME, cnma.KEYPOINT_OFFSET))]) + self.assertGreater( + 0.01, loss_dict['%s/%s' % (cnma.LOSS_KEY_PREFIX, + cnma.get_keypoint_name( + _TASK_NAME, cnma.KEYPOINT_REGRESSION))]) + self.assertGreater( + 0.01, loss_dict['%s/%s' % (cnma.LOSS_KEY_PREFIX, + cnma.SEGMENTATION_HEATMAP)]) + + @parameterized.parameters( + {'target_class_id': 1}, + {'target_class_id': 2}, + ) + def test_postprocess(self, target_class_id): + """Test the postprocess function.""" + model = build_center_net_meta_arch() + max_detection = model._center_params.max_box_predictions + num_keypoints = len(model._kp_params_dict[_TASK_NAME].keypoint_indices) + + class_center = np.zeros((1, 32, 32, 10), dtype=np.float32) + height_width = np.zeros((1, 32, 32, 2), dtype=np.float32) + offset = np.zeros((1, 32, 32, 2), dtype=np.float32) + keypoint_heatmaps = np.zeros((1, 32, 32, num_keypoints), dtype=np.float32) + keypoint_offsets = np.zeros((1, 32, 32, 2), dtype=np.float32) + keypoint_regression = np.random.randn(1, 32, 32, num_keypoints * 2) + + class_probs = np.zeros(10) + class_probs[target_class_id] = _logit(0.75) + class_center[0, 16, 16] = class_probs + height_width[0, 16, 16] = [5, 10] + offset[0, 16, 16] = [.25, .5] + keypoint_regression[0, 16, 16] = [ + -1., -1., + -1., 1., + 1., -1., + 1., 1.] + keypoint_heatmaps[0, 14, 14, 0] = _logit(0.9) + keypoint_heatmaps[0, 14, 18, 1] = _logit(0.9) + keypoint_heatmaps[0, 18, 14, 2] = _logit(0.9) + keypoint_heatmaps[0, 18, 18, 3] = _logit(0.05) # Note the low score. 
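The peaks planted above are what the expected keypoints asserted further down reduce to: a peak at row/column (14, 18) of the 32x32 output grid is the normalized coordinate [14, 18] / 32. A tiny numpy check of that relationship (the real postprocessing additionally applies candidate matching, offsets and the unmatched-keypoint fallback):

```
import numpy as np

heatmap = np.zeros((32, 32), dtype=np.float32)
heatmap[14, 18] = 1.0  # synthetic peak, mirroring keypoint_heatmaps[0, 14, 18, 1]

y, x = np.unravel_index(np.argmax(heatmap), heatmap.shape)
print(y / 32.0, x / 32.0)  # 0.4375 0.5625, i.e. [14., 18.] / 32.
```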
+ + segmentation_heatmap = np.zeros((1, 32, 32, 10), dtype=np.float32) + segmentation_heatmap[:, 14:18, 14:18, target_class_id] = 1.0 + segmentation_heatmap = _logit(segmentation_heatmap) + + class_center = tf.constant(class_center) + height_width = tf.constant(height_width) + offset = tf.constant(offset) + keypoint_heatmaps = tf.constant(keypoint_heatmaps, dtype=tf.float32) + keypoint_offsets = tf.constant(keypoint_offsets, dtype=tf.float32) + keypoint_regression = tf.constant(keypoint_regression, dtype=tf.float32) + segmentation_heatmap = tf.constant(segmentation_heatmap, dtype=tf.float32) + + prediction_dict = { + cnma.OBJECT_CENTER: [class_center], + cnma.BOX_SCALE: [height_width], + cnma.BOX_OFFSET: [offset], + cnma.get_keypoint_name(_TASK_NAME, cnma.KEYPOINT_HEATMAP): + [keypoint_heatmaps], + cnma.get_keypoint_name(_TASK_NAME, cnma.KEYPOINT_OFFSET): + [keypoint_offsets], + cnma.get_keypoint_name(_TASK_NAME, cnma.KEYPOINT_REGRESSION): + [keypoint_regression], + cnma.SEGMENTATION_HEATMAP: [segmentation_heatmap], + } + + def graph_fn(): + detections = model.postprocess(prediction_dict, + tf.constant([[128, 128, 3]])) + return detections + + detections = self.execute_cpu(graph_fn, []) + + self.assertAllClose(detections['detection_boxes'][0, 0], + np.array([55, 46, 75, 86]) / 128.0) + self.assertAllClose(detections['detection_scores'][0], + [.75, .5, .5, .5, .5]) + self.assertEqual(detections['detection_classes'][0, 0], target_class_id) + self.assertEqual(detections['num_detections'], [5]) + self.assertAllEqual([1, max_detection, num_keypoints, 2], + detections['detection_keypoints'].shape) + self.assertAllEqual([1, max_detection, num_keypoints], + detections['detection_keypoint_scores'].shape) + self.assertAllEqual([1, max_detection, 4, 4], + detections['detection_masks'].shape) + + # There should be some section of the first mask (correspond to the only + # detection) with non-zero mask values. + self.assertGreater(np.sum(detections['detection_masks'][0, 0, :, :] > 0), 0) + self.assertAllEqual( + detections['detection_masks'][0, 1:, :, :], + np.zeros_like(detections['detection_masks'][0, 1:, :, :])) + + if target_class_id == 1: + expected_kpts_for_obj_0 = np.array( + [[14., 14.], [14., 18.], [18., 14.], [17., 17.]]) / 32. + expected_kpt_scores_for_obj_0 = np.array( + [0.9, 0.9, 0.9, cnma.UNMATCHED_KEYPOINT_SCORE]) + np.testing.assert_allclose(detections['detection_keypoints'][0][0], + expected_kpts_for_obj_0, rtol=1e-6) + np.testing.assert_allclose(detections['detection_keypoint_scores'][0][0], + expected_kpt_scores_for_obj_0, rtol=1e-6) + else: + # All keypoint outputs should be zeros. 
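For reference, the expected box [55, 46, 75, 86] / 128 asserted above follows directly from the planted center, offset and size, assuming the usual CenterNet decoding in which the peak location plus offset gives the box center and everything is scaled by the output stride of 4:

```
stride = 4
cy = (16 + 0.25) * stride          # 65.0
cx = (16 + 0.50) * stride          # 66.0
height = 5 * stride                # 20
width = 10 * stride                # 40
box = [cy - height / 2, cx - width / 2, cy + height / 2, cx + width / 2]
print(box)                         # [55.0, 46.0, 75.0, 86.0]
print([v / 128.0 for v in box])    # normalized, as asserted above
```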
+ np.testing.assert_allclose( + detections['detection_keypoints'][0][0], + np.zeros([num_keypoints, 2], np.float), + rtol=1e-6) + np.testing.assert_allclose( + detections['detection_keypoint_scores'][0][0], + np.zeros([num_keypoints], np.float), + rtol=1e-6) + + def test_get_instance_indices(self): + classes = tf.constant([[0, 1, 2, 0], [2, 1, 2, 2]], dtype=tf.int32) + num_detections = tf.constant([1, 3], dtype=tf.int32) + batch_index = 1 + class_id = 2 + model = build_center_net_meta_arch() + valid_indices = model._get_instance_indices( + classes, num_detections, batch_index, class_id) + self.assertAllEqual(valid_indices.numpy(), [0, 2]) + + +def get_fake_prediction_dict(input_height, input_width, stride): + """Prepares the fake prediction dictionary.""" + output_height = input_height // stride + output_width = input_width // stride + object_center = np.zeros((2, output_height, output_width, _NUM_CLASSES), + dtype=np.float32) + # Box center: + # y: floor((0.54 + 0.56) / 2 * 4) = 2, + # x: floor((0.54 + 0.56) / 2 * 8) = 4 + object_center[0, 2, 4, 1] = 1.0 + object_center = _logit(object_center) + + # Box size: + # height: (0.56 - 0.54) * 4 = 0.08 + # width: (0.56 - 0.54) * 8 = 0.16 + object_scale = np.zeros((2, output_height, output_width, 2), dtype=np.float32) + object_scale[0, 2, 4] = 0.08, 0.16 + + # Box center offset coordinate (0.55, 0.55): + # y-offset: 0.55 * 4 - 2 = 0.2 + # x-offset: 0.55 * 8 - 4 = 0.4 + object_offset = np.zeros((2, output_height, output_width, 2), + dtype=np.float32) + object_offset[0, 2, 4] = 0.2, 0.4 + + keypoint_heatmap = np.zeros((2, output_height, output_width, _NUM_KEYPOINTS), + dtype=np.float32) + keypoint_heatmap[0, 2, 4, 1] = 1.0 + keypoint_heatmap[0, 2, 4, 3] = 1.0 + keypoint_heatmap = _logit(keypoint_heatmap) + + keypoint_offset = np.zeros((2, output_height, output_width, 2), + dtype=np.float32) + keypoint_offset[0, 2, 4] = 0.2, 0.4 + + keypoint_regression = np.zeros( + (2, output_height, output_width, 2 * _NUM_KEYPOINTS), dtype=np.float32) + keypoint_regression[0, 2, 4] = 0.0, 0.0, 0.2, 0.4, 0.0, 0.0, 0.2, 0.4 + + mask_heatmap = np.zeros((2, output_height, output_width, _NUM_CLASSES), + dtype=np.float32) + mask_heatmap[0, 2, 4, 1] = 1.0 + mask_heatmap = _logit(mask_heatmap) + + prediction_dict = { + 'preprocessed_inputs': + tf.zeros((2, input_height, input_width, 3)), + cnma.OBJECT_CENTER: [ + tf.constant(object_center), + tf.constant(object_center) + ], + cnma.BOX_SCALE: [ + tf.constant(object_scale), + tf.constant(object_scale) + ], + cnma.BOX_OFFSET: [ + tf.constant(object_offset), + tf.constant(object_offset) + ], + cnma.get_keypoint_name(_TASK_NAME, cnma.KEYPOINT_HEATMAP): [ + tf.constant(keypoint_heatmap), + tf.constant(keypoint_heatmap) + ], + cnma.get_keypoint_name(_TASK_NAME, cnma.KEYPOINT_OFFSET): [ + tf.constant(keypoint_offset), + tf.constant(keypoint_offset) + ], + cnma.get_keypoint_name(_TASK_NAME, cnma.KEYPOINT_REGRESSION): [ + tf.constant(keypoint_regression), + tf.constant(keypoint_regression) + ], + cnma.SEGMENTATION_HEATMAP: [ + tf.constant(mask_heatmap), + tf.constant(mask_heatmap) + ] + } + return prediction_dict + + +def get_fake_groundtruth_dict(input_height, input_width, stride): + """Prepares the fake groundtruth dictionary.""" + # A small box with center at (0.55, 0.55). 
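The small box defined just below, [0.54, 0.54, 0.56, 0.56], is exactly what the hand-written comments in get_fake_prediction_dict above encode: cell (2, 4), offset (0.2, 0.4) and size (0.08, 0.16) on the 4x8 output grid. A quick numeric recomputation of that encoding (informal, not part of the diff):

```
import math

ymin, xmin, ymax, xmax = 0.54, 0.54, 0.56, 0.56
out_h, out_w = 16 // 4, 32 // 4                 # 4 x 8 output grid at stride 4

cy, cx = (ymin + ymax) / 2 * out_h, (xmin + xmax) / 2 * out_w   # 2.2, 4.4
cell = (math.floor(cy), math.floor(cx))                         # (2, 4)
offset = (cy - cell[0], cx - cell[1])                           # ~(0.2, 0.4)
size = ((ymax - ymin) * out_h, (xmax - xmin) * out_w)           # ~(0.08, 0.16)
print(cell, offset, size)
```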
+ boxes = [ + tf.constant([[0.54, 0.54, 0.56, 0.56]]), + tf.constant([[0.0, 0.0, 0.5, 0.5]]), + ] + classes = [ + tf.one_hot([1], depth=_NUM_CLASSES), + tf.one_hot([0], depth=_NUM_CLASSES), + ] + weights = [ + tf.constant([1.]), + tf.constant([0.]), + ] + keypoints = [ + tf.tile( + tf.expand_dims( + tf.constant([[float('nan'), 0.55, + float('nan'), 0.55, 0.55, 0.0]]), + axis=2), + multiples=[1, 1, 2]), + tf.tile( + tf.expand_dims( + tf.constant([[float('nan'), 0.55, + float('nan'), 0.55, 0.55, 0.0]]), + axis=2), + multiples=[1, 1, 2]), + ] + labeled_classes = [ + tf.one_hot([1], depth=_NUM_CLASSES) + tf.one_hot([2], depth=_NUM_CLASSES), + tf.one_hot([0], depth=_NUM_CLASSES) + tf.one_hot([1], depth=_NUM_CLASSES), + ] + mask = np.zeros((1, input_height, input_width), dtype=np.float32) + mask[0, 8:8+stride, 16:16+stride] = 1 + masks = [ + tf.constant(mask), + tf.zeros_like(mask), + ] + groundtruth_dict = { + fields.BoxListFields.boxes: boxes, + fields.BoxListFields.weights: weights, + fields.BoxListFields.classes: classes, + fields.BoxListFields.keypoints: keypoints, + fields.BoxListFields.masks: masks, + fields.InputDataFields.groundtruth_labeled_classes: labeled_classes, + } + return groundtruth_dict + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class CenterNetMetaComputeLossTest(test_case.TestCase): + """Test for CenterNet loss compuation related functions.""" + + def setUp(self): + self.model = build_center_net_meta_arch() + self.classification_loss_fn = self.model._center_params.classification_loss + self.localization_loss_fn = self.model._od_params.localization_loss + self.true_image_shapes = tf.constant([[16, 24, 3], [16, 24, 3]]) + self.input_height = 16 + self.input_width = 32 + self.stride = 4 + self.per_pixel_weights = self.get_per_pixel_weights(self.true_image_shapes, + self.input_height, + self.input_width, + self.stride) + self.prediction_dict = get_fake_prediction_dict(self.input_height, + self.input_width, + self.stride) + self.model._groundtruth_lists = get_fake_groundtruth_dict( + self.input_height, self.input_width, self.stride) + super(CenterNetMetaComputeLossTest, self).setUp() + + def get_per_pixel_weights(self, true_image_shapes, input_height, input_width, + stride): + output_height, output_width = (input_height // stride, + input_width // stride) + + # TODO(vighneshb) Explore whether using floor here is safe. + output_true_image_shapes = tf.ceil(tf.to_float(true_image_shapes) / stride) + per_pixel_weights = cnma.get_valid_anchor_weights_in_flattened_image( + output_true_image_shapes, output_height, output_width) + per_pixel_weights = tf.expand_dims(per_pixel_weights, 2) + return per_pixel_weights + + def test_compute_object_center_loss(self): + def graph_fn(): + loss = self.model._compute_object_center_loss( + object_center_predictions=self.prediction_dict[cnma.OBJECT_CENTER], + input_height=self.input_height, + input_width=self.input_width, + per_pixel_weights=self.per_pixel_weights) + return loss + + loss = self.execute(graph_fn, []) + + # The prediction and groundtruth are curated to produce very low loss. 
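These low-loss checks are computed only over valid output pixels: get_per_pixel_weights above relies on cnma.get_valid_anchor_weights_in_flattened_image to zero out contributions from padded pixels. A rough numpy analogue, assuming that helper simply masks output pixels outside the ceil-rounded, stride-scaled true image extent:

```
import numpy as np

def valid_pixel_weights(true_h, true_w, out_h, out_w, stride):
  # 1.0 over output pixels covered by the true image, 0.0 over padding.
  vh = int(np.ceil(true_h / stride))
  vw = int(np.ceil(true_w / stride))
  weights = np.zeros((out_h, out_w), dtype=np.float32)
  weights[:vh, :vw] = 1.0
  return weights.reshape(-1)   # flattened, one weight per output pixel

# True shape [16, 24] on the 4x8 grid at stride 4: the last two columns are padding.
print(valid_pixel_weights(16, 24, 4, 8, 4).reshape(4, 8))
```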
+ self.assertGreater(0.01, loss) + + default_value = self.model._center_params.use_only_known_classes + self.model._center_params = ( + self.model._center_params._replace(use_only_known_classes=True)) + loss = self.model._compute_object_center_loss( + object_center_predictions=self.prediction_dict[cnma.OBJECT_CENTER], + input_height=self.input_height, + input_width=self.input_width, + per_pixel_weights=self.per_pixel_weights) + self.model._center_params = ( + self.model._center_params._replace( + use_only_known_classes=default_value)) + + # The prediction and groundtruth are curated to produce very low loss. + self.assertGreater(0.01, loss) + + def test_compute_box_scale_and_offset_loss(self): + def graph_fn(): + scale_loss, offset_loss = self.model._compute_box_scale_and_offset_loss( + scale_predictions=self.prediction_dict[cnma.BOX_SCALE], + offset_predictions=self.prediction_dict[cnma.BOX_OFFSET], + input_height=self.input_height, + input_width=self.input_width) + return scale_loss, offset_loss + + scale_loss, offset_loss = self.execute(graph_fn, []) + + # The prediction and groundtruth are curated to produce very low loss. + self.assertGreater(0.01, scale_loss) + self.assertGreater(0.01, offset_loss) + + def test_compute_kp_heatmap_loss(self): + def graph_fn(): + loss = self.model._compute_kp_heatmap_loss( + input_height=self.input_height, + input_width=self.input_width, + task_name=_TASK_NAME, + heatmap_predictions=self.prediction_dict[cnma.get_keypoint_name( + _TASK_NAME, cnma.KEYPOINT_HEATMAP)], + classification_loss_fn=self.classification_loss_fn, + per_pixel_weights=self.per_pixel_weights) + return loss + + loss = self.execute(graph_fn, []) + + # The prediction and groundtruth are curated to produce very low loss. + self.assertGreater(0.01, loss) + + def test_compute_kp_offset_loss(self): + def graph_fn(): + loss = self.model._compute_kp_offset_loss( + input_height=self.input_height, + input_width=self.input_width, + task_name=_TASK_NAME, + offset_predictions=self.prediction_dict[cnma.get_keypoint_name( + _TASK_NAME, cnma.KEYPOINT_OFFSET)], + localization_loss_fn=self.localization_loss_fn) + return loss + + loss = self.execute(graph_fn, []) + + # The prediction and groundtruth are curated to produce very low loss. + self.assertGreater(0.01, loss) + + def test_compute_kp_regression_loss(self): + def graph_fn(): + loss = self.model._compute_kp_regression_loss( + input_height=self.input_height, + input_width=self.input_width, + task_name=_TASK_NAME, + regression_predictions=self.prediction_dict[cnma.get_keypoint_name( + _TASK_NAME, cnma.KEYPOINT_REGRESSION,)], + localization_loss_fn=self.localization_loss_fn) + return loss + + loss = self.execute(graph_fn, []) + + # The prediction and groundtruth are curated to produce very low loss. 
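For the keypoint regression loss in particular, only labeled keypoints contribute: two of the four groundtruth keypoints in get_fake_groundtruth_dict are NaN, which is why keypoint_regression in the fake predictions interleaves (0.0, 0.0) with (0.2, 0.4). A small sketch of that target construction, assuming unlabeled keypoints receive zero targets and zero weight:

```
import numpy as np

keypoints_y = np.array([np.nan, 0.55, np.nan, 0.55])   # normalized y, 4 keypoints
valid = ~np.isnan(keypoints_y)
# Offset from the object-center cell (row 2 of the 4-row output grid).
targets_y = np.where(valid, np.nan_to_num(keypoints_y) * 4 - 2, 0.0)
weights = valid.astype(np.float32)
print(targets_y)   # [0.  0.2 0.  0.2]
print(weights)     # [0. 1. 0. 1.]
```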
+ self.assertGreater(0.01, loss) + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class CenterNetMetaArchRestoreTest(test_case.TestCase): + + def test_restore_map_resnet(self): + """Test restore map for a resnet backbone.""" + + model = build_center_net_meta_arch(build_resnet=True) + restore_map = model.restore_map('classification') + self.assertIsInstance(restore_map['feature_extractor'], tf.keras.Model) + + +class DummyFeatureExtractor(cnma.CenterNetFeatureExtractor): + + def __init__(self, + channel_means, + channel_stds, + bgr_ordering, + num_feature_outputs, + stride): + self._num_feature_outputs = num_feature_outputs + self._stride = stride + super(DummyFeatureExtractor, self).__init__( + channel_means=channel_means, channel_stds=channel_stds, + bgr_ordering=bgr_ordering) + + def predict(self): + pass + + def loss(self): + pass + + def postprocess(self): + pass + + def restore_map(self): + pass + + def call(self, inputs): + batch_size, input_height, input_width, _ = inputs.shape + fake_output = tf.ones([ + batch_size, input_height // self._stride, input_width // self._stride, + 64 + ], dtype=tf.float32) + return [fake_output] * self._num_feature_outputs + + @property + def out_stride(self): + return self._stride + + @property + def num_feature_outputs(self): + return self._num_feature_outputs + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class CenterNetFeatureExtractorTest(test_case.TestCase): + """Test the base feature extractor class.""" + + def test_preprocess(self): + feature_extractor = DummyFeatureExtractor( + channel_means=(1.0, 2.0, 3.0), + channel_stds=(10., 20., 30.), bgr_ordering=False, + num_feature_outputs=2, stride=4) + + img = np.zeros((2, 32, 32, 3)) + img[:, :, :] = 11, 22, 33 + + def graph_fn(): + output = feature_extractor.preprocess(img) + return output + + output = self.execute(graph_fn, []) + self.assertAlmostEqual(output.sum(), 2 * 32 * 32 * 3) + + def test_bgr_ordering(self): + feature_extractor = DummyFeatureExtractor( + channel_means=(0.0, 0.0, 0.0), + channel_stds=(1., 1., 1.), bgr_ordering=True, + num_feature_outputs=2, stride=4) + + img = np.zeros((2, 32, 32, 3), dtype=np.float32) + img[:, :, :] = 1, 2, 3 + + def graph_fn(): + output = feature_extractor.preprocess(img) + return output + + output = self.execute(graph_fn, []) + self.assertAllClose(output[..., 2], 1 * np.ones((2, 32, 32))) + self.assertAllClose(output[..., 1], 2 * np.ones((2, 32, 32))) + self.assertAllClose(output[..., 0], 3 * np.ones((2, 32, 32))) + + def test_default_ordering(self): + feature_extractor = DummyFeatureExtractor( + channel_means=(0.0, 0.0, 0.0), + channel_stds=(1., 1., 1.), bgr_ordering=False, + num_feature_outputs=2, stride=4) + + img = np.zeros((2, 32, 32, 3), dtype=np.float32) + img[:, :, :] = 1, 2, 3 + + def graph_fn(): + output = feature_extractor.preprocess(img) + return output + + output = self.execute(graph_fn, []) + self.assertAllClose(output[..., 0], 1 * np.ones((2, 32, 32))) + self.assertAllClose(output[..., 1], 2 * np.ones((2, 32, 32))) + self.assertAllClose(output[..., 2], 3 * np.ones((2, 32, 32))) + + +if __name__ == '__main__': + tf.enable_v2_behavior() + tf.test.main() diff --git a/research/object_detection/meta_architectures/context_rcnn_lib.py b/research/object_detection/meta_architectures/context_rcnn_lib.py new file mode 100644 index 000000000..902a88c77 --- /dev/null +++ b/research/object_detection/meta_architectures/context_rcnn_lib.py @@ -0,0 +1,224 @@ +# Lint as: python3 +# Copyright 2020 The 
TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Library functions for ContextRCNN.""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import tensorflow.compat.v1 as tf +import tf_slim as slim + + +# The negative value used in padding the invalid weights. +_NEGATIVE_PADDING_VALUE = -100000 + + +def filter_weight_value(weights, values, valid_mask): + """Filters weights and values based on valid_mask. + + _NEGATIVE_PADDING_VALUE will be added to invalid elements in the weights to + avoid their contribution in softmax. 0 will be set for the invalid elements in + the values. + + Args: + weights: A float Tensor of shape [batch_size, input_size, context_size]. + values: A float Tensor of shape [batch_size, context_size, + projected_dimension]. + valid_mask: A boolean Tensor of shape [batch_size, context_size]. True means + valid and False means invalid. + + Returns: + weights: A float Tensor of shape [batch_size, input_size, context_size]. + values: A float Tensor of shape [batch_size, context_size, + projected_dimension]. + + Raises: + ValueError: If shape of doesn't match. + """ + w_batch_size, _, w_context_size = weights.shape + v_batch_size, v_context_size, _ = values.shape + m_batch_size, m_context_size = valid_mask.shape + if w_batch_size != v_batch_size or v_batch_size != m_batch_size: + raise ValueError("Please make sure the first dimension of the input" + " tensors are the same.") + + if w_context_size != v_context_size: + raise ValueError("Please make sure the third dimension of weights matches" + " the second dimension of values.") + + if w_context_size != m_context_size: + raise ValueError("Please make sure the third dimension of the weights" + " matches the second dimension of the valid_mask.") + + valid_mask = valid_mask[..., tf.newaxis] + + # Force the invalid weights to be very negative so it won't contribute to + # the softmax. + weights += tf.transpose( + tf.cast(tf.math.logical_not(valid_mask), weights.dtype) * + _NEGATIVE_PADDING_VALUE, + perm=[0, 2, 1]) + + # Force the invalid values to be 0. + values *= tf.cast(valid_mask, values.dtype) + + return weights, values + + +def compute_valid_mask(num_valid_elements, num_elements): + """Computes mask of valid entries within padded context feature. + + Args: + num_valid_elements: A int32 Tensor of shape [batch_size]. + num_elements: An int32 Tensor. + + Returns: + A boolean Tensor of the shape [batch_size, num_elements]. True means + valid and False means invalid. 
+ """ + batch_size = num_valid_elements.shape[0] + element_idxs = tf.range(num_elements, dtype=tf.int32) + batch_element_idxs = tf.tile(element_idxs[tf.newaxis, ...], [batch_size, 1]) + num_valid_elements = num_valid_elements[..., tf.newaxis] + valid_mask = tf.less(batch_element_idxs, num_valid_elements) + return valid_mask + + +def project_features(features, projection_dimension, is_training, normalize): + """Projects features to another feature space. + + Args: + features: A float Tensor of shape [batch_size, features_size, + num_features]. + projection_dimension: A int32 Tensor. + is_training: A boolean Tensor (affecting batch normalization). + normalize: A boolean Tensor. If true, the output features will be l2 + normalized on the last dimension. + + Returns: + A float Tensor of shape [batch, features_size, projection_dimension]. + """ + # TODO(guanhangwu) Figure out a better way of specifying the batch norm + # params. + batch_norm_params = { + "is_training": is_training, + "decay": 0.97, + "epsilon": 0.001, + "center": True, + "scale": True + } + + batch_size, _, num_features = features.shape + features = tf.reshape(features, [-1, num_features]) + projected_features = slim.fully_connected( + features, + num_outputs=projection_dimension, + activation_fn=tf.nn.relu6, + normalizer_fn=slim.batch_norm, + normalizer_params=batch_norm_params) + + projected_features = tf.reshape(projected_features, + [batch_size, -1, projection_dimension]) + + if normalize: + projected_features = tf.math.l2_normalize(projected_features, axis=-1) + + return projected_features + + +def attention_block(input_features, context_features, bottleneck_dimension, + output_dimension, attention_temperature, valid_mask, + is_training): + """Generic attention block. + + Args: + input_features: A float Tensor of shape [batch_size, input_size, + num_input_features]. + context_features: A float Tensor of shape [batch_size, context_size, + num_context_features]. + bottleneck_dimension: A int32 Tensor representing the bottleneck dimension + for intermediate projections. + output_dimension: A int32 Tensor representing the last dimension of the + output feature. + attention_temperature: A float Tensor. It controls the temperature of the + softmax for weights calculation. The formula for calculation as follows: + weights = exp(weights / temperature) / sum(exp(weights / temperature)) + valid_mask: A boolean Tensor of shape [batch_size, context_size]. + is_training: A boolean Tensor (affecting batch normalization). + + Returns: + A float Tensor of shape [batch_size, input_size, output_dimension]. + """ + + with tf.variable_scope("AttentionBlock"): + queries = project_features( + input_features, bottleneck_dimension, is_training, normalize=True) + keys = project_features( + context_features, bottleneck_dimension, is_training, normalize=True) + values = project_features( + context_features, bottleneck_dimension, is_training, normalize=True) + + weights = tf.matmul(queries, keys, transpose_b=True) + + weights, values = filter_weight_value(weights, values, valid_mask) + + weights = tf.nn.softmax(weights / attention_temperature) + + features = tf.matmul(weights, values) + output_features = project_features( + features, output_dimension, is_training, normalize=False) + return output_features + + +def compute_box_context_attention(box_features, context_features, + valid_context_size, bottleneck_dimension, + attention_temperature, is_training): + """Computes the attention feature from the context given a batch of box. 
+ + Args: + box_features: A float Tensor of shape [batch_size, max_num_proposals, + height, width, channels]. It is pooled features from first stage + proposals. + context_features: A float Tensor of shape [batch_size, context_size, + num_context_features]. + valid_context_size: A int32 Tensor of shape [batch_size]. + bottleneck_dimension: A int32 Tensor representing the bottleneck dimension + for intermediate projections. + attention_temperature: A float Tensor. It controls the temperature of the + softmax for weights calculation. The formula for calculation as follows: + weights = exp(weights / temperature) / sum(exp(weights / temperature)) + is_training: A boolean Tensor (affecting batch normalization). + + Returns: + A float Tensor of shape [batch_size, max_num_proposals, 1, 1, channels]. + """ + _, context_size, _ = context_features.shape + valid_mask = compute_valid_mask(valid_context_size, context_size) + + channels = box_features.shape[-1] + # Average pools over height and width dimension so that the shape of + # box_features becomes [batch_size, max_num_proposals, channels]. + box_features = tf.reduce_mean(box_features, [2, 3]) + + output_features = attention_block(box_features, context_features, + bottleneck_dimension, channels.value, + attention_temperature, valid_mask, + is_training) + + # Expands the dimension back to match with the original feature map. + output_features = output_features[:, :, tf.newaxis, tf.newaxis, :] + + return output_features diff --git a/research/object_detection/meta_architectures/context_rcnn_lib_tf1_test.py b/research/object_detection/meta_architectures/context_rcnn_lib_tf1_test.py new file mode 100644 index 000000000..a0b3b848d --- /dev/null +++ b/research/object_detection/meta_architectures/context_rcnn_lib_tf1_test.py @@ -0,0 +1,126 @@ +# Lint as: python3 +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Tests for context_rcnn_lib.""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import unittest +from absl.testing import parameterized +import tensorflow.compat.v1 as tf + +from object_detection.meta_architectures import context_rcnn_lib +from object_detection.utils import test_case +from object_detection.utils import tf_version + +_NEGATIVE_PADDING_VALUE = -100000 + + +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') +class ContextRcnnLibTest(parameterized.TestCase, test_case.TestCase, + tf.test.TestCase): + """Tests for the functions in context_rcnn_lib.""" + + def test_compute_valid_mask(self): + num_elements = tf.constant(3, tf.int32) + num_valid_elementss = tf.constant((1, 2), tf.int32) + valid_mask = context_rcnn_lib.compute_valid_mask(num_valid_elementss, + num_elements) + expected_valid_mask = tf.constant([[1, 0, 0], [1, 1, 0]], tf.float32) + self.assertAllEqual(valid_mask, expected_valid_mask) + + def test_filter_weight_value(self): + weights = tf.ones((2, 3, 2), tf.float32) * 4 + values = tf.ones((2, 2, 4), tf.float32) + valid_mask = tf.constant([[True, True], [True, False]], tf.bool) + + filtered_weights, filtered_values = context_rcnn_lib.filter_weight_value( + weights, values, valid_mask) + expected_weights = tf.constant([[[4, 4], [4, 4], [4, 4]], + [[4, _NEGATIVE_PADDING_VALUE + 4], + [4, _NEGATIVE_PADDING_VALUE + 4], + [4, _NEGATIVE_PADDING_VALUE + 4]]]) + + expected_values = tf.constant([[[1, 1, 1, 1], [1, 1, 1, 1]], + [[1, 1, 1, 1], [0, 0, 0, 0]]]) + self.assertAllEqual(filtered_weights, expected_weights) + self.assertAllEqual(filtered_values, expected_values) + + # Changes the valid_mask so the results will be different. + valid_mask = tf.constant([[True, True], [False, False]], tf.bool) + + filtered_weights, filtered_values = context_rcnn_lib.filter_weight_value( + weights, values, valid_mask) + expected_weights = tf.constant( + [[[4, 4], [4, 4], [4, 4]], + [[_NEGATIVE_PADDING_VALUE + 4, _NEGATIVE_PADDING_VALUE + 4], + [_NEGATIVE_PADDING_VALUE + 4, _NEGATIVE_PADDING_VALUE + 4], + [_NEGATIVE_PADDING_VALUE + 4, _NEGATIVE_PADDING_VALUE + 4]]]) + + expected_values = tf.constant([[[1, 1, 1, 1], [1, 1, 1, 1]], + [[0, 0, 0, 0], [0, 0, 0, 0]]]) + self.assertAllEqual(filtered_weights, expected_weights) + self.assertAllEqual(filtered_values, expected_values) + + @parameterized.parameters((2, True, True), (2, False, True), + (10, True, False), (10, False, False)) + def test_project_features(self, projection_dimension, is_training, normalize): + features = tf.ones([2, 3, 4], tf.float32) + projected_features = context_rcnn_lib.project_features( + features, + projection_dimension, + is_training=is_training, + normalize=normalize) + + # Makes sure the shape is correct. 
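For context on what these context_rcnn_lib tests exercise: attention_block is standard dot-product attention over the context features, with padded context rows suppressed by filter_weight_value before the softmax. A plain-numpy sketch of that flow (the learned projections are omitted, the names are illustrative, and -100000.0 mirrors _NEGATIVE_PADDING_VALUE):

```
import numpy as np

def softmax(x, axis=-1):
  e = np.exp(x - x.max(axis=axis, keepdims=True))
  return e / e.sum(axis=axis, keepdims=True)

queries = np.random.randn(1, 3, 4)            # [batch, input_size, bottleneck]
keys = np.random.randn(1, 2, 4)               # [batch, context_size, bottleneck]
values = np.random.randn(1, 2, 4)
valid_mask = np.array([[True, False]])        # second context row is padding

weights = queries @ keys.transpose(0, 2, 1)                        # [1, 3, 2]
weights = weights + np.where(valid_mask, 0.0, -100000.0)[:, None, :]
values = values * valid_mask[..., None]       # zero out padded context rows
attended = softmax(weights / 1.0) @ values    # temperature = 1.0
print(attended.shape)                         # (1, 3, 4)
```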
+    self.assertAllEqual(projected_features.shape, [2, 3, projection_dimension])
+
+  @parameterized.parameters(
+      (2, 10, 1),
+      (3, 10, 2),
+      (4, 20, 3),
+      (5, 20, 4),
+      (7, 20, 5),
+  )
+  def test_attention_block(self, bottleneck_dimension, output_dimension,
+                           attention_temperature):
+    input_features = tf.ones([2, 3, 4], tf.float32)
+    context_features = tf.ones([2, 2, 3], tf.float32)
+    valid_mask = tf.constant([[True, True], [False, False]], tf.bool)
+    is_training = False
+    output_features = context_rcnn_lib.attention_block(
+        input_features, context_features, bottleneck_dimension,
+        output_dimension, attention_temperature, valid_mask, is_training)
+
+    # Makes sure the shape is correct.
+    self.assertAllEqual(output_features.shape, [2, 3, output_dimension])
+
+  @parameterized.parameters(True, False)
+  def test_compute_box_context_attention(self, is_training):
+    box_features = tf.ones([2, 3, 4, 4, 4], tf.float32)
+    context_features = tf.ones([2, 5, 6], tf.float32)
+    valid_context_size = tf.constant((2, 3), tf.int32)
+    bottleneck_dimension = 10
+    attention_temperature = 1
+    attention_features = context_rcnn_lib.compute_box_context_attention(
+        box_features, context_features, valid_context_size,
+        bottleneck_dimension, attention_temperature, is_training)
+    # Makes sure the shape is correct.
+    self.assertAllEqual(attention_features.shape, [2, 3, 1, 1, 4])
+
+
+if __name__ == '__main__':
+  tf.test.main()
diff --git a/research/object_detection/meta_architectures/context_rcnn_meta_arch.py b/research/object_detection/meta_architectures/context_rcnn_meta_arch.py
new file mode 100644
index 000000000..abe30558b
--- /dev/null
+++ b/research/object_detection/meta_architectures/context_rcnn_meta_arch.py
@@ -0,0 +1,340 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Context R-CNN meta-architecture definition.
+
+This adds the ability to attend to contextual features within the
+Faster R-CNN object detection framework, in order to improve object detection
+performance.
+See https://arxiv.org/abs/1912.03538 for more information.
+""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import functools + +from object_detection.core import standard_fields as fields +from object_detection.meta_architectures import context_rcnn_lib +from object_detection.meta_architectures import faster_rcnn_meta_arch + + +class ContextRCNNMetaArch(faster_rcnn_meta_arch.FasterRCNNMetaArch): + """Context R-CNN Meta-architecture definition.""" + + def __init__(self, + is_training, + num_classes, + image_resizer_fn, + feature_extractor, + number_of_stages, + first_stage_anchor_generator, + first_stage_target_assigner, + first_stage_atrous_rate, + first_stage_box_predictor_arg_scope_fn, + first_stage_box_predictor_kernel_size, + first_stage_box_predictor_depth, + first_stage_minibatch_size, + first_stage_sampler, + first_stage_non_max_suppression_fn, + first_stage_max_proposals, + first_stage_localization_loss_weight, + first_stage_objectness_loss_weight, + crop_and_resize_fn, + initial_crop_size, + maxpool_kernel_size, + maxpool_stride, + second_stage_target_assigner, + second_stage_mask_rcnn_box_predictor, + second_stage_batch_size, + second_stage_sampler, + second_stage_non_max_suppression_fn, + second_stage_score_conversion_fn, + second_stage_localization_loss_weight, + second_stage_classification_loss_weight, + second_stage_classification_loss, + second_stage_mask_prediction_loss_weight=1.0, + hard_example_miner=None, + parallel_iterations=16, + add_summaries=True, + clip_anchors_to_image=False, + use_static_shapes=False, + resize_masks=True, + freeze_batchnorm=False, + return_raw_detections_during_predict=False, + output_final_box_features=False, + attention_bottleneck_dimension=None, + attention_temperature=None): + """ContextRCNNMetaArch Constructor. + + Args: + is_training: A boolean indicating whether the training version of the + computation graph should be constructed. + num_classes: Number of classes. Note that num_classes *does not* + include the background category, so if groundtruth labels take values + in {0, 1, .., K-1}, num_classes=K (and not K+1, even though the + assigned classification targets can range from {0,... K}). + image_resizer_fn: A callable for image resizing. This callable + takes a rank-3 image tensor of shape [height, width, channels] + (corresponding to a single image), an optional rank-3 instance mask + tensor of shape [num_masks, height, width] and returns a resized rank-3 + image tensor, a resized mask tensor if one was provided in the input. In + addition this callable must also return a 1-D tensor of the form + [height, width, channels] containing the size of the true image, as the + image resizer can perform zero padding. See protos/image_resizer.proto. + feature_extractor: A FasterRCNNFeatureExtractor object. + number_of_stages: An integer values taking values in {1, 2, 3}. If + 1, the function will construct only the Region Proposal Network (RPN) + part of the model. If 2, the function will perform box refinement and + other auxiliary predictions all in the second stage. If 3, it will + extract features from refined boxes and perform the auxiliary + predictions on the non-maximum suppressed refined boxes. + If is_training is true and the value of number_of_stages is 3, it is + reduced to 2 since all the model heads are trained in parallel in second + stage during training. 
+ first_stage_anchor_generator: An anchor_generator.AnchorGenerator object + (note that currently we only support + grid_anchor_generator.GridAnchorGenerator objects) + first_stage_target_assigner: Target assigner to use for first stage of + Faster R-CNN (RPN). + first_stage_atrous_rate: A single integer indicating the atrous rate for + the single convolution op which is applied to the `rpn_features_to_crop` + tensor to obtain a tensor to be used for box prediction. Some feature + extractors optionally allow for producing feature maps computed at + denser resolutions. The atrous rate is used to compensate for the + denser feature maps by using an effectively larger receptive field. + (This should typically be set to 1). + first_stage_box_predictor_arg_scope_fn: Either a + Keras layer hyperparams object or a function to construct tf-slim + arg_scope for conv2d, separable_conv2d and fully_connected ops. Used + for the RPN box predictor. If it is a keras hyperparams object the + RPN box predictor will be a Keras model. If it is a function to + construct an arg scope it will be a tf-slim box predictor. + first_stage_box_predictor_kernel_size: Kernel size to use for the + convolution op just prior to RPN box predictions. + first_stage_box_predictor_depth: Output depth for the convolution op + just prior to RPN box predictions. + first_stage_minibatch_size: The "batch size" to use for computing the + objectness and location loss of the region proposal network. This + "batch size" refers to the number of anchors selected as contributing + to the loss function for any given image within the image batch and is + only called "batch_size" due to terminology from the Faster R-CNN paper. + first_stage_sampler: Sampler to use for first stage loss (RPN loss). + first_stage_non_max_suppression_fn: batch_multiclass_non_max_suppression + callable that takes `boxes`, `scores` and optional `clip_window`(with + all other inputs already set) and returns a dictionary containing + tensors with keys: `detection_boxes`, `detection_scores`, + `detection_classes`, `num_detections`. This is used to perform non max + suppression on the boxes predicted by the Region Proposal Network + (RPN). + See `post_processing.batch_multiclass_non_max_suppression` for the type + and shape of these tensors. + first_stage_max_proposals: Maximum number of boxes to retain after + performing Non-Max Suppression (NMS) on the boxes predicted by the + Region Proposal Network (RPN). + first_stage_localization_loss_weight: A float + first_stage_objectness_loss_weight: A float + crop_and_resize_fn: A differentiable resampler to use for cropping RPN + proposal features. + initial_crop_size: A single integer indicating the output size + (width and height are set to be the same) of the initial bilinear + interpolation based cropping during ROI pooling. + maxpool_kernel_size: A single integer indicating the kernel size of the + max pool op on the cropped feature map during ROI pooling. + maxpool_stride: A single integer indicating the stride of the max pool + op on the cropped feature map during ROI pooling. + second_stage_target_assigner: Target assigner to use for second stage of + Faster R-CNN. If the model is configured with multiple prediction heads, + this target assigner is used to generate targets for all heads (with the + correct `unmatched_class_label`). + second_stage_mask_rcnn_box_predictor: Mask R-CNN box predictor to use for + the second stage. 
+      second_stage_batch_size: The batch size used for computing the
+        classification and refined location loss of the box classifier. This
+        "batch size" refers to the number of proposals selected as contributing
+        to the loss function for any given image within the image batch and is
+        only called "batch_size" due to terminology from the Faster R-CNN paper.
+      second_stage_sampler: Sampler to use for second stage loss (box
+        classifier loss).
+      second_stage_non_max_suppression_fn: batch_multiclass_non_max_suppression
+        callable that takes `boxes`, `scores`, optional `clip_window` and
+        optional (kwarg) `mask` inputs (with all other inputs already set)
+        and returns a dictionary containing tensors with keys:
+        `detection_boxes`, `detection_scores`, `detection_classes`,
+        `num_detections`, and (optionally) `detection_masks`. See
+        `post_processing.batch_multiclass_non_max_suppression` for the type and
+        shape of these tensors.
+      second_stage_score_conversion_fn: Callable elementwise nonlinearity
+        (that takes tensors as inputs and returns tensors). This is usually
+        used to convert logits to probabilities.
+      second_stage_localization_loss_weight: A float indicating the scale
+        factor for second stage localization loss.
+      second_stage_classification_loss_weight: A float indicating the scale
+        factor for second stage classification loss.
+      second_stage_classification_loss: Classification loss used by the second
+        stage classifier. Either losses.WeightedSigmoidClassificationLoss or
+        losses.WeightedSoftmaxClassificationLoss.
+      second_stage_mask_prediction_loss_weight: A float indicating the scale
+        factor for second stage mask prediction loss. This is applicable only
+        if the second stage box predictor is configured to predict masks.
+      hard_example_miner: A losses.HardExampleMiner object (can be None).
+      parallel_iterations: (Optional) The number of iterations allowed to run
+        in parallel for calls to tf.map_fn.
+      add_summaries: boolean (default: True) controlling whether summary ops
+        should be added to the tensorflow graph.
+      clip_anchors_to_image: Normally, anchors generated for a given image size
+        are pruned during training if they lie outside the image window. This
+        option clips the anchors to be within the image instead of pruning.
+      use_static_shapes: If True, uses implementation of ops with static shape
+        guarantees.
+      resize_masks: Indicates whether the masks present in the groundtruth
+        should be resized in the model with `image_resizer_fn`.
+      freeze_batchnorm: Whether to freeze batch norm parameters in the first
+        stage box predictor during training or not. When training with a small
+        batch size (e.g. 1), it is desirable to freeze batch norm update and
+        use pretrained batch norm params.
+      return_raw_detections_during_predict: Whether to return raw detection
+        boxes in the predict() method. These are decoded boxes that have not
+        been through postprocessing (i.e. NMS). Default False.
+      output_final_box_features: Whether to output final box features. If
+        True, it crops the feature map based on the final box prediction and
+        returns it in the dict as detection_features.
+      attention_bottleneck_dimension: A single integer. The bottleneck feature
+        dimension of the attention block.
+      attention_temperature: A single float. The attention temperature.
+
+    Raises:
+      ValueError: If `second_stage_batch_size` > `first_stage_max_proposals` at
+        training time.
+      ValueError: If first_stage_anchor_generator is not of type
+        grid_anchor_generator.GridAnchorGenerator.
+ """ + super(ContextRCNNMetaArch, self).__init__( + is_training, + num_classes, + image_resizer_fn, + feature_extractor, + number_of_stages, + first_stage_anchor_generator, + first_stage_target_assigner, + first_stage_atrous_rate, + first_stage_box_predictor_arg_scope_fn, + first_stage_box_predictor_kernel_size, + first_stage_box_predictor_depth, + first_stage_minibatch_size, + first_stage_sampler, + first_stage_non_max_suppression_fn, + first_stage_max_proposals, + first_stage_localization_loss_weight, + first_stage_objectness_loss_weight, + crop_and_resize_fn, + initial_crop_size, + maxpool_kernel_size, + maxpool_stride, + second_stage_target_assigner, + second_stage_mask_rcnn_box_predictor, + second_stage_batch_size, + second_stage_sampler, + second_stage_non_max_suppression_fn, + second_stage_score_conversion_fn, + second_stage_localization_loss_weight, + second_stage_classification_loss_weight, + second_stage_classification_loss, + second_stage_mask_prediction_loss_weight=( + second_stage_mask_prediction_loss_weight), + hard_example_miner=hard_example_miner, + parallel_iterations=parallel_iterations, + add_summaries=add_summaries, + clip_anchors_to_image=clip_anchors_to_image, + use_static_shapes=use_static_shapes, + resize_masks=resize_masks, + freeze_batchnorm=freeze_batchnorm, + return_raw_detections_during_predict=( + return_raw_detections_during_predict), + output_final_box_features=output_final_box_features) + + self._context_feature_extract_fn = functools.partial( + context_rcnn_lib.compute_box_context_attention, + bottleneck_dimension=attention_bottleneck_dimension, + attention_temperature=attention_temperature, + is_training=is_training) + + @staticmethod + def get_side_inputs(features): + """Overrides the get_side_inputs function in the base class. + + This function returns context_features and valid_context_size, which will be + used in the _compute_second_stage_input_feature_maps function. + + Args: + features: A dictionary of tensors. + + Returns: + A dictionary of tensors contains context_features and valid_context_size. + + Raises: + ValueError: If context_features or valid_context_size is not in the + features. + """ + if (fields.InputDataFields.context_features not in features or + fields.InputDataFields.valid_context_size not in features): + raise ValueError( + "Please make sure context_features and valid_context_size are in the " + "features") + + return { + fields.InputDataFields.context_features: + features[fields.InputDataFields.context_features], + fields.InputDataFields.valid_context_size: + features[fields.InputDataFields.valid_context_size] + } + + def _compute_second_stage_input_feature_maps(self, features_to_crop, + proposal_boxes_normalized, + context_features, + valid_context_size): + """Crops to a set of proposals from the feature map for a batch of images. + + This function overrides the one in the FasterRCNNMetaArch. Aside from + cropping and resizing the feature maps, which is done in the parent class, + it adds context attention features to the box features. + + Args: + features_to_crop: A float32 Tensor with shape [batch_size, height, width, + depth] + proposal_boxes_normalized: A float32 Tensor with shape [batch_size, + num_proposals, box_code_size] containing proposal boxes in normalized + coordinates. + context_features: A float Tensor of shape [batch_size, context_size, + num_context_features]. + valid_context_size: A int32 Tensor of shape [batch_size]. + + Returns: + A float32 Tensor with shape [K, new_height, new_width, depth]. 
+ """ + box_features = self._crop_and_resize_fn( + features_to_crop, proposal_boxes_normalized, + [self._initial_crop_size, self._initial_crop_size]) + + attention_features = self._context_feature_extract_fn( + box_features=box_features, + context_features=context_features, + valid_context_size=valid_context_size) + + # Adds box features with attention features. + box_features += attention_features + + flattened_feature_maps = self._flatten_first_two_dimensions(box_features) + + return self._maxpool_layer(flattened_feature_maps) diff --git a/research/object_detection/meta_architectures/context_rcnn_meta_arch_tf1_test.py b/research/object_detection/meta_architectures/context_rcnn_meta_arch_tf1_test.py index 47d7624d0..a5dc8cc8e 100644 --- a/research/object_detection/meta_architectures/context_rcnn_meta_arch_tf1_test.py +++ b/research/object_detection/meta_architectures/context_rcnn_meta_arch_tf1_test.py @@ -1,4 +1,4 @@ -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,14 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== -"""Tests for google3.third_party.tensorflow_models.object_detection.meta_architectures.context_meta_arch.""" +"""Tests for object_detection.meta_architectures.context_meta_arch.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import functools - +import unittest from absl.testing import parameterized import mock import tensorflow.compat.v1 as tf @@ -109,6 +109,7 @@ class FakeFasterRCNNKerasFeatureExtractor( ]) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class ContextRCNNMetaArchTest(test_case.TestCase, parameterized.TestCase): def _get_model(self, box_predictor, **common_kwargs): diff --git a/research/object_detection/metrics/calibration_evaluation_test.py b/research/object_detection/metrics/calibration_evaluation_tf1_test.py similarity index 98% rename from research/object_detection/metrics/calibration_evaluation_test.py rename to research/object_detection/metrics/calibration_evaluation_tf1_test.py index 375978d86..0f3d6eb31 100644 --- a/research/object_detection/metrics/calibration_evaluation_test.py +++ b/research/object_detection/metrics/calibration_evaluation_tf1_test.py @@ -18,9 +18,11 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function +import unittest import tensorflow.compat.v1 as tf from object_detection.core import standard_fields from object_detection.metrics import calibration_evaluation +from object_detection.utils import tf_version def _get_categories_list(): @@ -36,6 +38,7 @@ def _get_categories_list(): }] +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class CalibrationDetectionEvaluationTest(tf.test.TestCase): def _get_ece(self, ece_op, update_op): diff --git a/research/object_detection/metrics/calibration_metrics_test.py b/research/object_detection/metrics/calibration_metrics_tf1_test.py similarity index 97% rename from research/object_detection/metrics/calibration_metrics_test.py rename to research/object_detection/metrics/calibration_metrics_tf1_test.py index 54793fca0..9c1adbca2 100644 --- 
a/research/object_detection/metrics/calibration_metrics_test.py +++ b/research/object_detection/metrics/calibration_metrics_tf1_test.py @@ -18,11 +18,14 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function +import unittest import numpy as np import tensorflow.compat.v1 as tf from object_detection.metrics import calibration_metrics +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class CalibrationLibTest(tf.test.TestCase): @staticmethod diff --git a/research/object_detection/metrics/coco_evaluation.py b/research/object_detection/metrics/coco_evaluation.py index 7a962457b..3ecfddb0d 100644 --- a/research/object_detection/metrics/coco_evaluation.py +++ b/research/object_detection/metrics/coco_evaluation.py @@ -24,6 +24,7 @@ import tensorflow.compat.v1 as tf from object_detection.core import standard_fields from object_detection.metrics import coco_tools from object_detection.utils import json_utils +from object_detection.utils import np_mask_ops from object_detection.utils import object_detection_evaluation @@ -1263,3 +1264,535 @@ class CocoMaskEvaluator(object_detection_evaluation.DetectionEvaluator): eval_metric_ops[metric_name] = (tf.py_func( value_func_factory(metric_name), [], np.float32), update_op) return eval_metric_ops + + +class CocoPanopticSegmentationEvaluator( + object_detection_evaluation.DetectionEvaluator): + """Class to evaluate PQ (panoptic quality) metric on COCO dataset. + + More details about this metric: https://arxiv.org/pdf/1801.00868.pdf. + """ + + def __init__(self, + categories, + include_metrics_per_category=False, + iou_threshold=0.5, + ioa_threshold=0.5): + """Constructor. + + Args: + categories: A list of dicts, each of which has the following keys - + 'id': (required) an integer id uniquely identifying this category. + 'name': (required) string representing category name e.g., 'cat', 'dog'. + include_metrics_per_category: If True, include metrics for each category. + iou_threshold: intersection-over-union threshold for mask matching (with + normal groundtruths). + ioa_threshold: intersection-over-area threshold for mask matching with + "is_crowd" groundtruths. + """ + super(CocoPanopticSegmentationEvaluator, self).__init__(categories) + self._groundtruth_masks = {} + self._groundtruth_class_labels = {} + self._groundtruth_is_crowd = {} + self._predicted_masks = {} + self._predicted_class_labels = {} + self._include_metrics_per_category = include_metrics_per_category + self._iou_threshold = iou_threshold + self._ioa_threshold = ioa_threshold + + def clear(self): + """Clears the state to prepare for a fresh evaluation.""" + self._groundtruth_masks.clear() + self._groundtruth_class_labels.clear() + self._groundtruth_is_crowd.clear() + self._predicted_masks.clear() + self._predicted_class_labels.clear() + + def add_single_ground_truth_image_info(self, image_id, groundtruth_dict): + """Adds groundtruth for a single image to be used for evaluation. + + If the image has already been added, a warning is logged, and groundtruth is + ignored. + + Args: + image_id: A unique string/integer identifier for the image. + groundtruth_dict: A dictionary containing - + InputDataFields.groundtruth_classes: integer numpy array of shape + [num_masks] containing 1-indexed groundtruth classes for the mask. + InputDataFields.groundtruth_instance_masks: uint8 numpy array of shape + [num_masks, image_height, image_width] containing groundtruth masks. 
+ The elements of the array must be in {0, 1}. + InputDataFields.groundtruth_is_crowd (optional): integer numpy array of + shape [num_boxes] containing iscrowd flag for groundtruth boxes. + """ + + if image_id in self._groundtruth_masks: + tf.logging.warning( + 'Ignoring groundtruth with image %s, since it has already been ' + 'added to the ground truth database.', image_id) + return + + self._groundtruth_masks[image_id] = groundtruth_dict[ + standard_fields.InputDataFields.groundtruth_instance_masks] + self._groundtruth_class_labels[image_id] = groundtruth_dict[ + standard_fields.InputDataFields.groundtruth_classes] + groundtruth_is_crowd = groundtruth_dict.get( + standard_fields.InputDataFields.groundtruth_is_crowd) + # Drop groundtruth_is_crowd if empty tensor. + if groundtruth_is_crowd is not None and not groundtruth_is_crowd.size > 0: + groundtruth_is_crowd = None + if groundtruth_is_crowd is not None: + self._groundtruth_is_crowd[image_id] = groundtruth_is_crowd + + def add_single_detected_image_info(self, image_id, detections_dict): + """Adds detections for a single image to be used for evaluation. + + If a detection has already been added for this image id, a warning is + logged, and the detection is skipped. + + Args: + image_id: A unique string/integer identifier for the image. + detections_dict: A dictionary containing - + DetectionResultFields.detection_classes: integer numpy array of shape + [num_masks] containing 1-indexed detection classes for the masks. + DetectionResultFields.detection_masks: optional uint8 numpy array of + shape [num_masks, image_height, image_width] containing instance + masks. The elements of the array must be in {0, 1}. + + Raises: + ValueError: If results and groundtruth shape don't match. + """ + + if image_id not in self._groundtruth_masks: + raise ValueError('Missing groundtruth for image id: {}'.format(image_id)) + + detection_masks = detections_dict[ + standard_fields.DetectionResultFields.detection_masks] + self._predicted_masks[image_id] = detection_masks + self._predicted_class_labels[image_id] = detections_dict[ + standard_fields.DetectionResultFields.detection_classes] + groundtruth_mask_shape = self._groundtruth_masks[image_id].shape + if groundtruth_mask_shape[1:] != detection_masks.shape[1:]: + raise ValueError("The shape of results doesn't match groundtruth.") + + def evaluate(self): + """Evaluates the detection masks and returns a dictionary of coco metrics. + + Returns: + A dictionary holding - + + 1. summary_metric: + 'PanopticQuality@%.2fIOU': mean panoptic quality averaged over classes at + the required IOU. + 'SegmentationQuality@%.2fIOU': mean segmentation quality averaged over + classes at the required IOU. + 'RecognitionQuality@%.2fIOU': mean recognition quality averaged over + classes at the required IOU. + 'NumValidClasses': number of valid classes. A valid class should have at + least one normal (is_crowd=0) groundtruth mask or one predicted mask. + 'NumTotalClasses': number of total classes. + + 2. per_category_pq: if include_metrics_per_category is True, category + specific results with keys of the form: + 'PanopticQuality@%.2fIOU_ByCategory/category'. + """ + # Evaluate and accumulate the iou/tp/fp/fn. + sum_tp_iou, sum_num_tp, sum_num_fp, sum_num_fn = self._evaluate_all_masks() + # Compute PQ metric for each category and average over all classes. 
+ mask_metrics = self._compute_panoptic_metrics(sum_tp_iou, sum_num_tp, + sum_num_fp, sum_num_fn) + return mask_metrics + + def get_estimator_eval_metric_ops(self, eval_dict): + """Returns a dictionary of eval metric ops. + + Note that once value_op is called, the detections and groundtruth added via + update_op are cleared. + + Args: + eval_dict: A dictionary that holds tensors for evaluating object detection + performance. For single-image evaluation, this dictionary may be + produced from eval_util.result_dict_for_single_example(). If multi-image + evaluation, `eval_dict` should contain the fields + 'num_gt_masks_per_image' and 'num_det_masks_per_image' to properly unpad + the tensors from the batch. + + Returns: + a dictionary of metric names to tuple of value_op and update_op that can + be used as eval metric ops in tf.estimator.EstimatorSpec. Note that all + update ops must be run together and similarly all value ops must be run + together to guarantee correct behaviour. + """ + + def update_op(image_id_batched, groundtruth_classes_batched, + groundtruth_instance_masks_batched, + groundtruth_is_crowd_batched, num_gt_masks_per_image, + detection_classes_batched, detection_masks_batched, + num_det_masks_per_image): + """Update op for metrics.""" + for (image_id, groundtruth_classes, groundtruth_instance_masks, + groundtruth_is_crowd, num_gt_mask, detection_classes, + detection_masks, num_det_mask) in zip( + image_id_batched, groundtruth_classes_batched, + groundtruth_instance_masks_batched, groundtruth_is_crowd_batched, + num_gt_masks_per_image, detection_classes_batched, + detection_masks_batched, num_det_masks_per_image): + + self.add_single_ground_truth_image_info( + image_id, { + 'groundtruth_classes': + groundtruth_classes[:num_gt_mask], + 'groundtruth_instance_masks': + groundtruth_instance_masks[:num_gt_mask], + 'groundtruth_is_crowd': + groundtruth_is_crowd[:num_gt_mask] + }) + self.add_single_detected_image_info( + image_id, { + 'detection_classes': detection_classes[:num_det_mask], + 'detection_masks': detection_masks[:num_det_mask] + }) + + # Unpack items from the evaluation dictionary. + (image_id, groundtruth_classes, groundtruth_instance_masks, + groundtruth_is_crowd, num_gt_masks_per_image, detection_classes, + detection_masks, num_det_masks_per_image + ) = self._unpack_evaluation_dictionary_items(eval_dict) + + update_op = tf.py_func(update_op, [ + image_id, groundtruth_classes, groundtruth_instance_masks, + groundtruth_is_crowd, num_gt_masks_per_image, detection_classes, + detection_masks, num_det_masks_per_image + ], []) + + metric_names = [ + 'PanopticQuality@%.2fIOU' % self._iou_threshold, + 'SegmentationQuality@%.2fIOU' % self._iou_threshold, + 'RecognitionQuality@%.2fIOU' % self._iou_threshold + ] + if self._include_metrics_per_category: + for category_dict in self._categories: + metric_names.append('PanopticQuality@%.2fIOU_ByCategory/%s' % + (self._iou_threshold, category_dict['name'])) + + def first_value_func(): + self._metrics = self.evaluate() + self.clear() + return np.float32(self._metrics[metric_names[0]]) + + def value_func_factory(metric_name): + + def value_func(): + return np.float32(self._metrics[metric_name]) + + return value_func + + # Ensure that the metrics are only evaluated once. 
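+    # first_value_op runs evaluate() and clear() exactly once and caches the
+    # results in self._metrics; the value ops created below, gated by the
+    # control dependency on first_value_op, only read that cached dictionary.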
+ first_value_op = tf.py_func(first_value_func, [], tf.float32) + eval_metric_ops = {metric_names[0]: (first_value_op, update_op)} + with tf.control_dependencies([first_value_op]): + for metric_name in metric_names[1:]: + eval_metric_ops[metric_name] = (tf.py_func( + value_func_factory(metric_name), [], np.float32), update_op) + return eval_metric_ops + + def _evaluate_all_masks(self): + """Evaluate all masks and compute sum iou/TP/FP/FN.""" + + sum_num_tp = {category['id']: 0 for category in self._categories} + sum_num_fp = sum_num_tp.copy() + sum_num_fn = sum_num_tp.copy() + sum_tp_iou = sum_num_tp.copy() + + for image_id in self._groundtruth_class_labels: + # Separate normal and is_crowd groundtruth + crowd_gt_indices = self._groundtruth_is_crowd.get(image_id) + (normal_gt_masks, normal_gt_classes, crowd_gt_masks, + crowd_gt_classes) = self._separate_normal_and_crowd_labels( + crowd_gt_indices, self._groundtruth_masks[image_id], + self._groundtruth_class_labels[image_id]) + + # Mask matching to normal GT. + predicted_masks = self._predicted_masks[image_id] + predicted_class_labels = self._predicted_class_labels[image_id] + (overlaps, pred_matched, + gt_matched) = self._match_predictions_to_groundtruths( + predicted_masks, + predicted_class_labels, + normal_gt_masks, + normal_gt_classes, + self._iou_threshold, + is_crowd=False, + with_replacement=False) + + # Accumulate true positives. + for (class_id, is_matched, overlap) in zip(predicted_class_labels, + pred_matched, overlaps): + if is_matched: + sum_num_tp[class_id] += 1 + sum_tp_iou[class_id] += overlap + + # Accumulate false negatives. + for (class_id, is_matched) in zip(normal_gt_classes, gt_matched): + if not is_matched: + sum_num_fn[class_id] += 1 + + # Match remaining predictions to crowd gt. + remained_pred_indices = np.logical_not(pred_matched) + remained_pred_masks = predicted_masks[remained_pred_indices, :, :] + remained_pred_classes = predicted_class_labels[remained_pred_indices] + _, pred_matched, _ = self._match_predictions_to_groundtruths( + remained_pred_masks, + remained_pred_classes, + crowd_gt_masks, + crowd_gt_classes, + self._ioa_threshold, + is_crowd=True, + with_replacement=True) + + # Accumulate false positives + for (class_id, is_matched) in zip(remained_pred_classes, pred_matched): + if not is_matched: + sum_num_fp[class_id] += 1 + return sum_tp_iou, sum_num_tp, sum_num_fp, sum_num_fn + + def _compute_panoptic_metrics(self, sum_tp_iou, sum_num_tp, sum_num_fp, + sum_num_fn): + """Compute PQ metric for each category and average over all classes. + + Args: + sum_tp_iou: dict, summed true positive intersection-over-union (IoU) for + each class, keyed by class_id. + sum_num_tp: the total number of true positives for each class, keyed by + class_id. + sum_num_fp: the total number of false positives for each class, keyed by + class_id. + sum_num_fn: the total number of false negatives for each class, keyed by + class_id. + + Returns: + mask_metrics: a dictionary containing averaged metrics over all classes, + and per-category metrics if required. 
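+
+    As a hypothetical single-class illustration of the per-class formulas:
+    with sum_tp_iou = 1.5, sum_num_tp = 2, sum_num_fp = 1 and sum_num_fn = 1,
+    that class gets PQ = 1.5 / (2 + 0.5 * 1 + 0.5 * 1) = 0.5,
+    SQ = 1.5 / 2 = 0.75 and RQ = 2 / 3.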
+ """ + mask_metrics = {} + sum_pq = 0 + sum_sq = 0 + sum_rq = 0 + num_valid_classes = 0 + for category in self._categories: + class_id = category['id'] + (panoptic_quality, segmentation_quality, + recognition_quality) = self._compute_panoptic_metrics_single_class( + sum_tp_iou[class_id], sum_num_tp[class_id], sum_num_fp[class_id], + sum_num_fn[class_id]) + if panoptic_quality is not None: + sum_pq += panoptic_quality + sum_sq += segmentation_quality + sum_rq += recognition_quality + num_valid_classes += 1 + if self._include_metrics_per_category: + mask_metrics['PanopticQuality@%.2fIOU_ByCategory/%s' % + (self._iou_threshold, + category['name'])] = panoptic_quality + mask_metrics['PanopticQuality@%.2fIOU' % + self._iou_threshold] = sum_pq / num_valid_classes + mask_metrics['SegmentationQuality@%.2fIOU' % + self._iou_threshold] = sum_sq / num_valid_classes + mask_metrics['RecognitionQuality@%.2fIOU' % + self._iou_threshold] = sum_rq / num_valid_classes + mask_metrics['NumValidClasses'] = num_valid_classes + mask_metrics['NumTotalClasses'] = len(self._categories) + return mask_metrics + + def _compute_panoptic_metrics_single_class(self, sum_tp_iou, num_tp, num_fp, + num_fn): + """Compute panoptic metrics: panoptic/segmentation/recognition quality. + + More computation details in https://arxiv.org/pdf/1801.00868.pdf. + Args: + sum_tp_iou: summed true positive intersection-over-union (IoU) for a + specific class. + num_tp: the total number of true positives for a specific class. + num_fp: the total number of false positives for a specific class. + num_fn: the total number of false negatives for a specific class. + + Returns: + panoptic_quality: sum_tp_iou / (num_tp + 0.5*num_fp + 0.5*num_fn). + segmentation_quality: sum_tp_iou / num_tp. + recognition_quality: num_tp / (num_tp + 0.5*num_fp + 0.5*num_fn). + """ + denominator = num_tp + 0.5 * num_fp + 0.5 * num_fn + # Calculate metric only if there is at least one GT or one prediction. + if denominator > 0: + recognition_quality = num_tp / denominator + if num_tp > 0: + segmentation_quality = sum_tp_iou / num_tp + else: + # If there is no TP for this category. + segmentation_quality = 0 + panoptic_quality = segmentation_quality * recognition_quality + return panoptic_quality, segmentation_quality, recognition_quality + else: + return None, None, None + + def _separate_normal_and_crowd_labels(self, crowd_gt_indices, + groundtruth_masks, groundtruth_classes): + """Separate normal and crowd groundtruth class_labels and masks. + + Args: + crowd_gt_indices: None or array of shape [num_groundtruths]. If None, all + groundtruths are treated as normal ones. + groundtruth_masks: array of shape [num_groundtruths, height, width]. + groundtruth_classes: array of shape [num_groundtruths]. + + Returns: + normal_gt_masks: array of shape [num_normal_groundtruths, height, width]. + normal_gt_classes: array of shape [num_normal_groundtruths]. + crowd_gt_masks: array of shape [num_crowd_groundtruths, height, width]. + crowd_gt_classes: array of shape [num_crowd_groundtruths]. + Raises: + ValueError: if the shape of groundtruth classes doesn't match groundtruth + masks or if the shape of crowd_gt_indices. 
+ """ + if groundtruth_masks.shape[0] != groundtruth_classes.shape[0]: + raise ValueError( + "The number of masks doesn't match the number of labels.") + if crowd_gt_indices is None: + # All gts are treated as normal + crowd_gt_indices = np.zeros(groundtruth_masks.shape, dtype=np.bool) + else: + if groundtruth_masks.shape[0] != crowd_gt_indices.shape[0]: + raise ValueError( + "The number of masks doesn't match the number of is_crowd labels.") + crowd_gt_indices = crowd_gt_indices.astype(np.bool) + normal_gt_indices = np.logical_not(crowd_gt_indices) + if normal_gt_indices.size: + normal_gt_masks = groundtruth_masks[normal_gt_indices, :, :] + normal_gt_classes = groundtruth_classes[normal_gt_indices] + crowd_gt_masks = groundtruth_masks[crowd_gt_indices, :, :] + crowd_gt_classes = groundtruth_classes[crowd_gt_indices] + else: + # No groundtruths available, groundtruth_masks.shape = (0, h, w) + normal_gt_masks = groundtruth_masks + normal_gt_classes = groundtruth_classes + crowd_gt_masks = groundtruth_masks + crowd_gt_classes = groundtruth_classes + return normal_gt_masks, normal_gt_classes, crowd_gt_masks, crowd_gt_classes + + def _match_predictions_to_groundtruths(self, + predicted_masks, + predicted_classes, + groundtruth_masks, + groundtruth_classes, + matching_threshold, + is_crowd=False, + with_replacement=False): + """Match the predicted masks to groundtruths. + + Args: + predicted_masks: array of shape [num_predictions, height, width]. + predicted_classes: array of shape [num_predictions]. + groundtruth_masks: array of shape [num_groundtruths, height, width]. + groundtruth_classes: array of shape [num_groundtruths]. + matching_threshold: if the overlap between a prediction and a groundtruth + is larger than this threshold, the prediction is true positive. + is_crowd: whether the groundtruths are crowd annotation or not. If True, + use intersection over area (IoA) as the overlapping metric; otherwise + use intersection over union (IoU). + with_replacement: whether a groundtruth can be matched to multiple + predictions. By default, for normal groundtruths, only 1-1 matching is + allowed for normal groundtruths; for crowd groundtruths, 1-to-many must + be allowed. + + Returns: + best_overlaps: array of shape [num_predictions]. Values representing the + IoU + or IoA with best matched groundtruth. + pred_matched: array of shape [num_predictions]. Boolean value representing + whether the ith prediction is matched to a groundtruth. + gt_matched: array of shape [num_groundtruth]. Boolean value representing + whether the ith groundtruth is matched to a prediction. + Raises: + ValueError: if the shape of groundtruth/predicted masks doesn't match + groundtruth/predicted classes. 
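+
+    Note: matching is greedy in prediction order; each prediction is assigned
+    the highest-overlap groundtruth of the same class that is still available
+    (already-matched groundtruths stay available when with_replacement is
+    True), so the result can depend on the ordering of the predictions.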
+ """ + if groundtruth_masks.shape[0] != groundtruth_classes.shape[0]: + raise ValueError( + "The number of GT masks doesn't match the number of labels.") + if predicted_masks.shape[0] != predicted_classes.shape[0]: + raise ValueError( + "The number of predicted masks doesn't match the number of labels.") + gt_matched = np.zeros(groundtruth_classes.shape, dtype=np.bool) + pred_matched = np.zeros(predicted_classes.shape, dtype=np.bool) + best_overlaps = np.zeros(predicted_classes.shape) + for pid in range(predicted_classes.shape[0]): + best_overlap = 0 + matched_gt_id = -1 + for gid in range(groundtruth_classes.shape[0]): + if predicted_classes[pid] == groundtruth_classes[gid]: + if (not with_replacement) and gt_matched[gid]: + continue + if not is_crowd: + overlap = np_mask_ops.iou(predicted_masks[pid:pid + 1], + groundtruth_masks[gid:gid + 1])[0, 0] + else: + overlap = np_mask_ops.ioa(groundtruth_masks[gid:gid + 1], + predicted_masks[pid:pid + 1])[0, 0] + if overlap >= matching_threshold and overlap > best_overlap: + matched_gt_id = gid + best_overlap = overlap + if matched_gt_id >= 0: + gt_matched[matched_gt_id] = True + pred_matched[pid] = True + best_overlaps[pid] = best_overlap + return best_overlaps, pred_matched, gt_matched + + def _unpack_evaluation_dictionary_items(self, eval_dict): + """Unpack items from the evaluation dictionary.""" + input_data_fields = standard_fields.InputDataFields + detection_fields = standard_fields.DetectionResultFields + image_id = eval_dict[input_data_fields.key] + groundtruth_classes = eval_dict[input_data_fields.groundtruth_classes] + groundtruth_instance_masks = eval_dict[ + input_data_fields.groundtruth_instance_masks] + groundtruth_is_crowd = eval_dict.get(input_data_fields.groundtruth_is_crowd, + None) + num_gt_masks_per_image = eval_dict.get( + input_data_fields.num_groundtruth_boxes, None) + detection_classes = eval_dict[detection_fields.detection_classes] + detection_masks = eval_dict[detection_fields.detection_masks] + num_det_masks_per_image = eval_dict.get(detection_fields.num_detections, + None) + if groundtruth_is_crowd is None: + groundtruth_is_crowd = tf.zeros_like(groundtruth_classes, dtype=tf.bool) + + if not image_id.shape.as_list(): + # Apply a batch dimension to all tensors. 
+ image_id = tf.expand_dims(image_id, 0) + groundtruth_classes = tf.expand_dims(groundtruth_classes, 0) + groundtruth_instance_masks = tf.expand_dims(groundtruth_instance_masks, 0) + groundtruth_is_crowd = tf.expand_dims(groundtruth_is_crowd, 0) + detection_classes = tf.expand_dims(detection_classes, 0) + detection_masks = tf.expand_dims(detection_masks, 0) + + if num_gt_masks_per_image is None: + num_gt_masks_per_image = tf.shape(groundtruth_classes)[1:2] + else: + num_gt_masks_per_image = tf.expand_dims(num_gt_masks_per_image, 0) + + if num_det_masks_per_image is None: + num_det_masks_per_image = tf.shape(detection_classes)[1:2] + else: + num_det_masks_per_image = tf.expand_dims(num_det_masks_per_image, 0) + else: + if num_gt_masks_per_image is None: + num_gt_masks_per_image = tf.tile( + tf.shape(groundtruth_classes)[1:2], + multiples=tf.shape(groundtruth_classes)[0:1]) + if num_det_masks_per_image is None: + num_det_masks_per_image = tf.tile( + tf.shape(detection_classes)[1:2], + multiples=tf.shape(detection_classes)[0:1]) + return (image_id, groundtruth_classes, groundtruth_instance_masks, + groundtruth_is_crowd, num_gt_masks_per_image, detection_classes, + detection_masks, num_det_masks_per_image) diff --git a/research/object_detection/metrics/coco_evaluation_test.py b/research/object_detection/metrics/coco_evaluation_test.py index aed6047f8..165c94780 100644 --- a/research/object_detection/metrics/coco_evaluation_test.py +++ b/research/object_detection/metrics/coco_evaluation_test.py @@ -18,10 +18,12 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function +import unittest import numpy as np import tensorflow.compat.v1 as tf from object_detection.core import standard_fields from object_detection.metrics import coco_evaluation +from object_detection.utils import tf_version def _get_categories_list(): @@ -250,6 +252,7 @@ class CocoDetectionEvaluationTest(tf.test.TestCase): }) +@unittest.skipIf(tf_version.is_tf2(), 'Only Supported in TF1.X') class CocoEvaluationPyFuncTest(tf.test.TestCase): def testGetOneMAPWithMatchingGroundtruthAndDetections(self): @@ -926,6 +929,7 @@ class CocoKeypointEvaluationTest(tf.test.TestCase): -1.0) +@unittest.skipIf(tf_version.is_tf2(), 'Only Supported in TF1.X') class CocoKeypointEvaluationPyFuncTest(tf.test.TestCase): def testGetOneMAPWithMatchingKeypoints(self): @@ -1438,6 +1442,7 @@ class CocoMaskEvaluationTest(tf.test.TestCase): self.assertFalse(coco_evaluator._detection_masks_list) +@unittest.skipIf(tf_version.is_tf2(), 'Only Supported in TF1.X') class CocoMaskEvaluationPyFuncTest(tf.test.TestCase): def testAddEvalDict(self): @@ -1716,5 +1721,221 @@ class CocoMaskEvaluationPyFuncTest(tf.test.TestCase): self.assertFalse(coco_evaluator._detection_masks_list) +def _get_panoptic_test_data(): + # image1 contains 3 people in gt, (2 normal annotation and 1 "is_crowd" + # annotation), and 3 people in prediction. + gt_masks1 = np.zeros((3, 50, 50), dtype=np.uint8) + result_masks1 = np.zeros((3, 50, 50), dtype=np.uint8) + gt_masks1[0, 10:20, 20:30] = 1 + result_masks1[0, 10:18, 20:30] = 1 + gt_masks1[1, 25:30, 25:35] = 1 + result_masks1[1, 18:25, 25:30] = 1 + gt_masks1[2, 40:50, 40:50] = 1 + result_masks1[2, 47:50, 47:50] = 1 + gt_class1 = np.array([1, 1, 1]) + gt_is_crowd1 = np.array([0, 0, 1]) + result_class1 = np.array([1, 1, 1]) + + # image2 contains 1 dog and 1 cat in gt, while 1 person and 1 dog in + # prediction. 
+ gt_masks2 = np.zeros((2, 30, 40), dtype=np.uint8) + result_masks2 = np.zeros((2, 30, 40), dtype=np.uint8) + gt_masks2[0, 5:15, 20:35] = 1 + gt_masks2[1, 20:30, 0:10] = 1 + result_masks2[0, 20:25, 10:15] = 1 + result_masks2[1, 6:15, 15:35] = 1 + gt_class2 = np.array([2, 3]) + gt_is_crowd2 = np.array([0, 0]) + result_class2 = np.array([1, 2]) + + gt_class = [gt_class1, gt_class2] + gt_masks = [gt_masks1, gt_masks2] + gt_is_crowd = [gt_is_crowd1, gt_is_crowd2] + result_class = [result_class1, result_class2] + result_masks = [result_masks1, result_masks2] + return gt_class, gt_masks, gt_is_crowd, result_class, result_masks + + +class CocoPanopticEvaluationTest(tf.test.TestCase): + + def test_panoptic_quality(self): + pq_evaluator = coco_evaluation.CocoPanopticSegmentationEvaluator( + _get_categories_list(), include_metrics_per_category=True) + (gt_class, gt_masks, gt_is_crowd, result_class, + result_masks) = _get_panoptic_test_data() + + for i in range(2): + pq_evaluator.add_single_ground_truth_image_info( + image_id='image%d' % i, + groundtruth_dict={ + standard_fields.InputDataFields.groundtruth_classes: + gt_class[i], + standard_fields.InputDataFields.groundtruth_instance_masks: + gt_masks[i], + standard_fields.InputDataFields.groundtruth_is_crowd: + gt_is_crowd[i] + }) + + pq_evaluator.add_single_detected_image_info( + image_id='image%d' % i, + detections_dict={ + standard_fields.DetectionResultFields.detection_classes: + result_class[i], + standard_fields.DetectionResultFields.detection_masks: + result_masks[i] + }) + + metrics = pq_evaluator.evaluate() + self.assertAlmostEqual(metrics['PanopticQuality@0.50IOU_ByCategory/person'], + 0.32) + self.assertAlmostEqual(metrics['PanopticQuality@0.50IOU_ByCategory/dog'], + 135.0 / 195) + self.assertAlmostEqual(metrics['PanopticQuality@0.50IOU_ByCategory/cat'], 0) + self.assertAlmostEqual(metrics['SegmentationQuality@0.50IOU'], + (0.8 + 135.0 / 195) / 3) + self.assertAlmostEqual(metrics['RecognitionQuality@0.50IOU'], (0.4 + 1) / 3) + self.assertAlmostEqual(metrics['PanopticQuality@0.50IOU'], + (0.32 + 135.0 / 195) / 3) + self.assertEqual(metrics['NumValidClasses'], 3) + self.assertEqual(metrics['NumTotalClasses'], 3) + + +@unittest.skipIf(tf_version.is_tf2(), 'Only Supported in TF1.X') +class CocoPanopticEvaluationPyFuncTest(tf.test.TestCase): + + def testPanopticQualityNoBatch(self): + pq_evaluator = coco_evaluation.CocoPanopticSegmentationEvaluator( + _get_categories_list(), include_metrics_per_category=True) + + image_id = tf.placeholder(tf.string, shape=()) + groundtruth_classes = tf.placeholder(tf.int32, shape=(None)) + groundtruth_masks = tf.placeholder(tf.uint8, shape=(None, None, None)) + groundtruth_is_crowd = tf.placeholder(tf.int32, shape=(None)) + detection_classes = tf.placeholder(tf.int32, shape=(None)) + detection_masks = tf.placeholder(tf.uint8, shape=(None, None, None)) + + input_data_fields = standard_fields.InputDataFields + detection_fields = standard_fields.DetectionResultFields + eval_dict = { + input_data_fields.key: image_id, + input_data_fields.groundtruth_classes: groundtruth_classes, + input_data_fields.groundtruth_instance_masks: groundtruth_masks, + input_data_fields.groundtruth_is_crowd: groundtruth_is_crowd, + detection_fields.detection_classes: detection_classes, + detection_fields.detection_masks: detection_masks, + } + + eval_metric_ops = pq_evaluator.get_estimator_eval_metric_ops(eval_dict) + + _, update_op = eval_metric_ops['PanopticQuality@0.50IOU'] + (gt_class, gt_masks, gt_is_crowd, result_class, + 
result_masks) = _get_panoptic_test_data() + + with self.test_session() as sess: + for i in range(2): + sess.run( + update_op, + feed_dict={ + image_id: 'image%d' % i, + groundtruth_classes: gt_class[i], + groundtruth_masks: gt_masks[i], + groundtruth_is_crowd: gt_is_crowd[i], + detection_classes: result_class[i], + detection_masks: result_masks[i] + }) + metrics = {} + for key, (value_op, _) in eval_metric_ops.items(): + metrics[key] = value_op + metrics = sess.run(metrics) + self.assertAlmostEqual(metrics['PanopticQuality@0.50IOU'], + (0.32 + 135.0 / 195) / 3) + + def testPanopticQualityBatched(self): + pq_evaluator = coco_evaluation.CocoPanopticSegmentationEvaluator( + _get_categories_list(), include_metrics_per_category=True) + batch_size = 2 + image_id = tf.placeholder(tf.string, shape=(batch_size)) + groundtruth_classes = tf.placeholder(tf.int32, shape=(batch_size, None)) + groundtruth_masks = tf.placeholder( + tf.uint8, shape=(batch_size, None, None, None)) + groundtruth_is_crowd = tf.placeholder(tf.int32, shape=(batch_size, None)) + detection_classes = tf.placeholder(tf.int32, shape=(batch_size, None)) + detection_masks = tf.placeholder( + tf.uint8, shape=(batch_size, None, None, None)) + num_gt_masks_per_image = tf.placeholder(tf.int32, shape=(batch_size)) + num_det_masks_per_image = tf.placeholder(tf.int32, shape=(batch_size)) + + input_data_fields = standard_fields.InputDataFields + detection_fields = standard_fields.DetectionResultFields + eval_dict = { + input_data_fields.key: image_id, + input_data_fields.groundtruth_classes: groundtruth_classes, + input_data_fields.groundtruth_instance_masks: groundtruth_masks, + input_data_fields.groundtruth_is_crowd: groundtruth_is_crowd, + input_data_fields.num_groundtruth_boxes: num_gt_masks_per_image, + detection_fields.detection_classes: detection_classes, + detection_fields.detection_masks: detection_masks, + detection_fields.num_detections: num_det_masks_per_image, + } + + eval_metric_ops = pq_evaluator.get_estimator_eval_metric_ops(eval_dict) + + _, update_op = eval_metric_ops['PanopticQuality@0.50IOU'] + (gt_class, gt_masks, gt_is_crowd, result_class, + result_masks) = _get_panoptic_test_data() + with self.test_session() as sess: + sess.run( + update_op, + feed_dict={ + image_id: ['image0', 'image1'], + groundtruth_classes: + np.stack([ + gt_class[0], + np.pad(gt_class[1], (0, 1), mode='constant') + ], + axis=0), + groundtruth_masks: + np.stack([ + np.pad( + gt_masks[0], ((0, 0), (0, 10), (0, 10)), + mode='constant'), + np.pad( + gt_masks[1], ((0, 1), (0, 30), (0, 20)), + mode='constant'), + ], + axis=0), + groundtruth_is_crowd: + np.stack([ + gt_is_crowd[0], + np.pad(gt_is_crowd[1], (0, 1), mode='constant') + ], + axis=0), + num_gt_masks_per_image: np.array([3, 2]), + detection_classes: + np.stack([ + result_class[0], + np.pad(result_class[1], (0, 1), mode='constant') + ], + axis=0), + detection_masks: + np.stack([ + np.pad( + result_masks[0], ((0, 0), (0, 10), (0, 10)), + mode='constant'), + np.pad( + result_masks[1], ((0, 1), (0, 30), (0, 20)), + mode='constant'), + ], + axis=0), + num_det_masks_per_image: np.array([3, 2]), + }) + metrics = {} + for key, (value_op, _) in eval_metric_ops.items(): + metrics[key] = value_op + metrics = sess.run(metrics) + self.assertAlmostEqual(metrics['PanopticQuality@0.50IOU'], + (0.32 + 135.0 / 195) / 3) + + if __name__ == '__main__': tf.test.main() diff --git a/research/object_detection/metrics/coco_tools.py b/research/object_detection/metrics/coco_tools.py index f2379f653..790d5bdef 100644 --- 
a/research/object_detection/metrics/coco_tools.py +++ b/research/object_detection/metrics/coco_tools.py @@ -52,6 +52,7 @@ from pycocotools import coco from pycocotools import cocoeval from pycocotools import mask +import six from six.moves import range from six.moves import zip import tensorflow.compat.v1 as tf @@ -353,7 +354,9 @@ def _RleCompress(masks): Returns: A pycocotools Run-length encoding of the mask. """ - return mask.encode(np.asfortranarray(masks)) + rle = mask.encode(np.asfortranarray(masks)) + rle['counts'] = six.ensure_str(rle['counts']) + return rle def ExportSingleImageGroundtruthToCoco(image_id, diff --git a/research/object_detection/metrics/offline_eval_map_corloc.py b/research/object_detection/metrics/offline_eval_map_corloc.py index 69ecaeaae..a12b1d984 100644 --- a/research/object_detection/metrics/offline_eval_map_corloc.py +++ b/research/object_detection/metrics/offline_eval_map_corloc.py @@ -36,8 +36,8 @@ import os import re import tensorflow.compat.v1 as tf +from object_detection import eval_util from object_detection.core import standard_fields -from object_detection.legacy import evaluator from object_detection.metrics import tf_example_parser from object_detection.utils import config_util from object_detection.utils import label_map_util @@ -94,7 +94,7 @@ def read_data_and_evaluate(input_config, eval_config): categories = label_map_util.create_categories_from_labelmap( input_config.label_map_path) - object_detection_evaluators = evaluator.get_evaluators( + object_detection_evaluators = eval_util.get_evaluators( eval_config, categories) # Support a single evaluator object_detection_evaluator = object_detection_evaluators[0] diff --git a/research/object_detection/model_lib_test.py b/research/object_detection/model_lib_tf1_test.py similarity index 98% rename from research/object_detection/model_lib_test.py rename to research/object_detection/model_lib_tf1_test.py index ae14ad844..7d4d81b2c 100644 --- a/research/object_detection/model_lib_test.py +++ b/research/object_detection/model_lib_tf1_test.py @@ -20,19 +20,17 @@ from __future__ import print_function import functools import os - +import unittest import numpy as np import tensorflow.compat.v1 as tf -from tensorflow.contrib.tpu.python.tpu import tpu_config -from tensorflow.contrib.tpu.python.tpu import tpu_estimator - from object_detection import inputs from object_detection import model_hparams from object_detection import model_lib from object_detection.builders import model_builder from object_detection.core import standard_fields as fields from object_detection.utils import config_util +from object_detection.utils import tf_version # Model for test. 
Options are: @@ -122,6 +120,7 @@ def _make_initializable_iterator(dataset): return iterator +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class ModelLibTest(tf.test.TestCase): @classmethod @@ -337,8 +336,7 @@ class ModelLibTest(tf.test.TestCase): def test_create_tpu_estimator_and_inputs(self): """Tests that number of train/eval defaults to config values.""" - - run_config = tpu_config.RunConfig() + run_config = tf.estimator.tpu.RunConfig() hparams = model_hparams.create_hparams( hparams_overrides='load_pretrained=false') pipeline_config_path = get_pipeline_config_path(MODEL_NAME_FOR_TEST) @@ -352,7 +350,7 @@ class ModelLibTest(tf.test.TestCase): estimator = train_and_eval_dict['estimator'] train_steps = train_and_eval_dict['train_steps'] - self.assertIsInstance(estimator, tpu_estimator.TPUEstimator) + self.assertIsInstance(estimator, tf.estimator.tpu.TPUEstimator) self.assertEqual(20, train_steps) def test_create_train_and_eval_specs(self): @@ -406,6 +404,7 @@ class ModelLibTest(tf.test.TestCase): self.assertEqual(None, experiment.eval_steps) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class UnbatchTensorsTest(tf.test.TestCase): def test_unbatch_without_unpadding(self): diff --git a/research/object_detection/model_lib_v2_test.py b/research/object_detection/model_lib_tf2_test.py similarity index 96% rename from research/object_detection/model_lib_v2_test.py rename to research/object_detection/model_lib_tf2_test.py index d2eff82f9..8c6d96172 100644 --- a/research/object_detection/model_lib_v2_test.py +++ b/research/object_detection/model_lib_tf2_test.py @@ -20,7 +20,7 @@ from __future__ import print_function import os import tempfile - +import unittest import numpy as np import six import tensorflow.compat.v1 as tf @@ -32,6 +32,7 @@ from object_detection.builders import model_builder from object_detection.core import model from object_detection.protos import train_pb2 from object_detection.utils import config_util +from object_detection.utils import tf_version if six.PY2: import mock # pylint: disable=g-importing-member,g-import-not-at-top @@ -72,6 +73,7 @@ def _get_config_kwarg_overrides(): } +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class ModelLibTest(tf.test.TestCase): @classmethod @@ -139,6 +141,7 @@ class SimpleModel(model.DetectionModel): return [] +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class ModelCheckpointTest(tf.test.TestCase): """Test for model checkpoint related functionality.""" @@ -171,6 +174,7 @@ class IncompatibleModel(SimpleModel): return {'weight': self.weight} +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class CheckpointV2Test(tf.test.TestCase): def setUp(self): diff --git a/research/object_detection/model_lib_v2.py b/research/object_detection/model_lib_v2.py index ab1fbdc1e..29eb5a295 100644 --- a/research/object_detection/model_lib_v2.py +++ b/research/object_detection/model_lib_v2.py @@ -358,7 +358,7 @@ def load_fine_tune_checkpoint( ckpt.restore(checkpoint_path).assert_existing_objects_matched() -def _get_filepath(strategy, filepath): +def get_filepath(strategy, filepath): """Get appropriate filepath for worker. Args: @@ -377,7 +377,7 @@ def _get_filepath(strategy, filepath): return os.path.join(filepath, 'temp_worker_{:03d}'.format(task_id)) -def _clean_temporary_directories(strategy, filepath): +def clean_temporary_directories(strategy, filepath): """Temporary directory clean up for MultiWorker Mirrored Strategy. 
This is needed for all non-chief workers. @@ -539,8 +539,8 @@ def train_loop( ## Train the model # Get the appropriate filepath (temporary or not) based on whether the worker # is the chief. - summary_writer_filepath = _get_filepath(strategy, - os.path.join(model_dir, 'train')) + summary_writer_filepath = get_filepath(strategy, + os.path.join(model_dir, 'train')) summary_writer = tf.compat.v2.summary.create_file_writer( summary_writer_filepath) @@ -567,7 +567,7 @@ def train_loop( ckpt = tf.compat.v2.train.Checkpoint( step=global_step, model=detection_model, optimizer=optimizer) - manager_dir = _get_filepath(strategy, model_dir) + manager_dir = get_filepath(strategy, model_dir) if not strategy.extended.should_checkpoint: checkpoint_max_to_keep = 1 manager = tf.compat.v2.train.CheckpointManager( @@ -615,6 +615,10 @@ def train_loop( return _sample_and_train(strategy, train_step_fn, data_iterator) train_input_iter = iter(train_input) + + if int(global_step.value()) == 0: + manager.save() + checkpointed_step = int(global_step.value()) logged_step = global_step.value() @@ -646,8 +650,8 @@ def train_loop( # Remove the checkpoint directories of the non-chief workers that # MultiWorkerMirroredStrategy forces us to save during sync distributed # training. - _clean_temporary_directories(strategy, manager_dir) - _clean_temporary_directories(strategy, summary_writer_filepath) + clean_temporary_directories(strategy, manager_dir) + clean_temporary_directories(strategy, summary_writer_filepath) def eager_eval_loop( diff --git a/research/object_detection/model_main_tf2.py b/research/object_detection/model_main_tf2.py new file mode 100644 index 000000000..f6832ba84 --- /dev/null +++ b/research/object_detection/model_main_tf2.py @@ -0,0 +1,112 @@ +# Lint as: python3 +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +r"""Creates and runs TF2 object detection models. + +################################## +NOTE: This module has not been fully tested; please bear with us while we iron +out the kinks. +################################## + +When a TPU device is available, this binary uses TPUStrategy. Otherwise, it uses +GPUS with MirroredStrategy/MultiWorkerMirroredStrategy. 
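+
+Passing --checkpoint_dir switches this binary into evaluation-only mode: it
+waits for checkpoints to appear in that directory, evaluates them, and writes
+the resulting metrics to --model_dir.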
+ +For local training/evaluation run: +PIPELINE_CONFIG_PATH=path/to/pipeline.config +MODEL_DIR=/tmp/model_outputs +NUM_TRAIN_STEPS=10000 +SAMPLE_1_OF_N_EVAL_EXAMPLES=1 +python model_main_tf2.py -- \ + --model_dir=$MODEL_DIR --num_train_steps=$NUM_TRAIN_STEPS \ + --sample_1_of_n_eval_examples=$SAMPLE_1_OF_N_EVAL_EXAMPLES \ + --pipeline_config_path=$PIPELINE_CONFIG_PATH \ + --alsologtostderr +""" +from absl import flags +import tensorflow.compat.v2 as tf +from object_detection import model_hparams +from object_detection import model_lib_v2 + +flags.DEFINE_string('pipeline_config_path', None, 'Path to pipeline config ' + 'file.') +flags.DEFINE_integer('num_train_steps', None, 'Number of train steps.') +flags.DEFINE_bool('eval_on_train_data', False, 'Enable evaluating on train ' + 'data (only supported in distributed training).') +flags.DEFINE_integer('sample_1_of_n_eval_examples', None, 'Will sample one of ' + 'every n eval input examples, where n is provided.') +flags.DEFINE_integer('sample_1_of_n_eval_on_train_examples', 5, 'Will sample ' + 'one of every n train input examples for evaluation, ' + 'where n is provided. This is only used if ' + '`eval_training_data` is True.') +flags.DEFINE_string( + 'hparams_overrides', None, 'Hyperparameter overrides, ' + 'represented as a string containing comma-separated ' + 'hparam_name=value pairs.') +flags.DEFINE_string( + 'model_dir', None, 'Path to output model directory ' + 'where event and checkpoint files will be written.') +flags.DEFINE_string( + 'checkpoint_dir', None, 'Path to directory holding a checkpoint. If ' + '`checkpoint_dir` is provided, this binary operates in eval-only mode, ' + 'writing resulting metrics to `model_dir`.') + +flags.DEFINE_integer('eval_timeout', 3600, 'Number of seconds to wait for an' + 'evaluation checkpoint before exiting.') +flags.DEFINE_integer( + 'num_workers', 1, 'When num_workers > 1, training uses ' + 'MultiWorkerMirroredStrategy. 
When num_workers = 1 it uses ' + 'MirroredStrategy.') + +FLAGS = flags.FLAGS + + +def main(unused_argv): + flags.mark_flag_as_required('model_dir') + flags.mark_flag_as_required('pipeline_config_path') + tf.config.set_soft_device_placement(True) + + if FLAGS.checkpoint_dir: + model_lib_v2.eval_continuously( + hparams=model_hparams.create_hparams(FLAGS.hparams_overrides), + pipeline_config_path=FLAGS.pipeline_config_path, + model_dir=FLAGS.model_dir, + train_steps=FLAGS.num_train_steps, + sample_1_of_n_eval_examples=FLAGS.sample_1_of_n_eval_examples, + sample_1_of_n_eval_on_train_examples=( + FLAGS.sample_1_of_n_eval_on_train_examples), + checkpoint_dir=FLAGS.checkpoint_dir, + wait_interval=300, timeout=FLAGS.eval_timeout) + else: + if tf.config.get_visible_devices('TPU'): + resolver = tf.distribute.cluster_resolver.TPUClusterResolver() + tf.config.experimental_connect_to_cluster(resolver) + tf.tpu.experimental.initialize_tpu_system(resolver) + strategy = tf.distribute.experimental.TPUStrategy(resolver) + elif FLAGS.num_workers > 1: + strategy = tf.distribute.experimental.MultiWorkerMirroredStrategy() + else: + strategy = tf.compat.v2.distribute.MirroredStrategy() + + with strategy.scope(): + model_lib_v2.train_loop( + hparams=model_hparams.create_hparams(FLAGS.hparams_overrides), + pipeline_config_path=FLAGS.pipeline_config_path, + model_dir=FLAGS.model_dir, + train_steps=FLAGS.num_train_steps, + use_tpu=FLAGS.use_tpu) + +if __name__ == '__main__': + tf.app.run() diff --git a/research/object_detection/models/center_net_hourglass_feature_extractor.py b/research/object_detection/models/center_net_hourglass_feature_extractor.py new file mode 100644 index 000000000..4761915aa --- /dev/null +++ b/research/object_detection/models/center_net_hourglass_feature_extractor.py @@ -0,0 +1,75 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Hourglass[1] feature extractor for CenterNet[2] meta architecture. + +[1]: https://arxiv.org/abs/1603.06937 +[2]: https://arxiv.org/abs/1904.07850 +""" + +from object_detection.meta_architectures import center_net_meta_arch +from object_detection.models.keras_models import hourglass_network + + +class CenterNetHourglassFeatureExtractor( + center_net_meta_arch.CenterNetFeatureExtractor): + """The hourglass feature extractor for CenterNet. + + This class is a thin wrapper around the HourglassFeatureExtractor class + along with some preprocessing methods inherited from the base class. + """ + + def __init__(self, hourglass_net, channel_means=(0., 0., 0.), + channel_stds=(1., 1., 1.), bgr_ordering=False): + """Intializes the feature extractor. + + Args: + hourglass_net: The underlying hourglass network to use. + channel_means: A tuple of floats, denoting the mean of each channel + which will be subtracted from it. + channel_stds: A tuple of floats, denoting the standard deviation of each + channel. 
Each channel will be divided by its standard deviation value. + bgr_ordering: bool, if set will change the channel ordering to be in the + [blue, red, green] order. + """ + + super(CenterNetHourglassFeatureExtractor, self).__init__( + channel_means=channel_means, channel_stds=channel_stds, + bgr_ordering=bgr_ordering) + self._network = hourglass_net + + def call(self, inputs): + return self._network(inputs) + + @property + def out_stride(self): + """The stride in the output image of the network.""" + return 4 + + @property + def num_feature_outputs(self): + """Ther number of feature outputs returned by the feature extractor.""" + return self._network.num_hourglasses + + def get_model(self): + return self._network + + +def hourglass_104(channel_means, channel_stds, bgr_ordering): + """The Hourglass-104 backbone for CenterNet.""" + + network = hourglass_network.hourglass_104() + return CenterNetHourglassFeatureExtractor( + network, channel_means=channel_means, channel_stds=channel_stds, + bgr_ordering=bgr_ordering) diff --git a/research/object_detection/models/center_net_hourglass_feature_extractor_tf2_test.py b/research/object_detection/models/center_net_hourglass_feature_extractor_tf2_test.py new file mode 100644 index 000000000..19d5cbe98 --- /dev/null +++ b/research/object_detection/models/center_net_hourglass_feature_extractor_tf2_test.py @@ -0,0 +1,44 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Testing hourglass feature extractor for CenterNet.""" +import unittest +import numpy as np +import tensorflow.compat.v1 as tf + +from object_detection.models import center_net_hourglass_feature_extractor as hourglass +from object_detection.models.keras_models import hourglass_network +from object_detection.utils import test_case +from object_detection.utils import tf_version + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class CenterNetHourglassFeatureExtractorTest(test_case.TestCase): + + def test_center_net_hourglass_feature_extractor(self): + + net = hourglass_network.HourglassNetwork( + num_stages=4, blocks_per_stage=[2, 3, 4, 5, 6], + channel_dims=[4, 6, 8, 10, 12, 14], num_hourglasses=2) + + model = hourglass.CenterNetHourglassFeatureExtractor(net) + def graph_fn(): + return model(tf.zeros((2, 64, 64, 3), dtype=np.float32)) + outputs = self.execute(graph_fn, []) + self.assertEqual(outputs[0].shape, (2, 16, 16, 6)) + self.assertEqual(outputs[1].shape, (2, 16, 16, 6)) + + +if __name__ == '__main__': + tf.test.main() diff --git a/research/object_detection/models/center_net_resnet_feature_extractor.py b/research/object_detection/models/center_net_resnet_feature_extractor.py new file mode 100644 index 000000000..477fa4c50 --- /dev/null +++ b/research/object_detection/models/center_net_resnet_feature_extractor.py @@ -0,0 +1,149 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Resnetv2 based feature extractors for CenterNet[1] meta architecture. + +[1]: https://arxiv.org/abs/1904.07850 +""" + + +import tensorflow.compat.v1 as tf + +from object_detection.meta_architectures.center_net_meta_arch import CenterNetFeatureExtractor + + +class CenterNetResnetFeatureExtractor(CenterNetFeatureExtractor): + """Resnet v2 base feature extractor for the CenterNet model.""" + + def __init__(self, resnet_type, channel_means=(0., 0., 0.), + channel_stds=(1., 1., 1.), bgr_ordering=False): + """Initializes the feature extractor with a specific ResNet architecture. + + Args: + resnet_type: A string specifying which kind of ResNet to use. Currently + only `resnet_v2_50` and `resnet_v2_101` are supported. + channel_means: A tuple of floats, denoting the mean of each channel + which will be subtracted from it. + channel_stds: A tuple of floats, denoting the standard deviation of each + channel. Each channel will be divided by its standard deviation value. + bgr_ordering: bool, if set will change the channel ordering to be in the + [blue, red, green] order. + + """ + + super(CenterNetResnetFeatureExtractor, self).__init__( + channel_means=channel_means, channel_stds=channel_stds, + bgr_ordering=bgr_ordering) + if resnet_type == 'resnet_v2_101': + self._base_model = tf.keras.applications.ResNet101V2(weights=None) + output_layer = 'conv5_block3_out' + elif resnet_type == 'resnet_v2_50': + self._base_model = tf.keras.applications.ResNet50V2(weights=None) + output_layer = 'conv5_block3_out' + else: + raise ValueError('Unknown Resnet Model {}'.format(resnet_type)) + output_layer = self._base_model.get_layer(output_layer) + + self._resnet_model = tf.keras.models.Model(inputs=self._base_model.input, + outputs=output_layer.output) + resnet_output = self._resnet_model(self._base_model.input) + + for num_filters in [256, 128, 64]: + # TODO(vighneshb) This section has a few differences from the paper + # Figure out how much of a performance impact they have. + + # 1. We use a simple convolution instead of a deformable convolution + conv = tf.keras.layers.Conv2D(filters=num_filters, kernel_size=3, + strides=1, padding='same') + resnet_output = conv(resnet_output) + resnet_output = tf.keras.layers.BatchNormalization()(resnet_output) + resnet_output = tf.keras.layers.ReLU()(resnet_output) + + # 2. We use the default initialization for the convolution layers + # instead of initializing it to do bilinear upsampling. 
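+      # Each Conv2DTranspose below upsamples by a factor of 2, so the three
+      # loop iterations take the stride-32 ResNet output back to the stride-4
+      # feature map that CenterNet expects (see out_stride below).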
+ conv_transpose = tf.keras.layers.Conv2DTranspose(filters=num_filters, + kernel_size=3, strides=2, + padding='same') + resnet_output = conv_transpose(resnet_output) + resnet_output = tf.keras.layers.BatchNormalization()(resnet_output) + resnet_output = tf.keras.layers.ReLU()(resnet_output) + + self._feature_extractor_model = tf.keras.models.Model( + inputs=self._base_model.input, outputs=resnet_output) + + def preprocess(self, resized_inputs): + """Preprocess input images for the ResNet model. + + This scales images in the range [0, 255] to the range [-1, 1] + + Args: + resized_inputs: a [batch, height, width, channels] float32 tensor. + + Returns: + outputs: a [batch, height, width, channels] float32 tensor. + + """ + resized_inputs = super(CenterNetResnetFeatureExtractor, self).preprocess( + resized_inputs) + return tf.keras.applications.resnet_v2.preprocess_input(resized_inputs) + + def load_feature_extractor_weights(self, path): + self._base_model.load_weights(path) + + def get_base_model(self): + """Get base resnet model for inspection and testing.""" + return self._base_model + + def call(self, inputs): + """Returns image features extracted by the backbone. + + Args: + inputs: An image tensor of shape [batch_size, input_height, + input_width, 3] + + Returns: + features_list: A list of length 1 containing a tensor of shape + [batch_size, input_height // 4, input_width // 4, 64] containing + the features extracted by the ResNet. + """ + return [self._feature_extractor_model(inputs)] + + @property + def num_feature_outputs(self): + return 1 + + @property + def out_stride(self): + return 4 + + +def resnet_v2_101(channel_means, channel_stds, bgr_ordering): + """The ResNet v2 101 feature extractor.""" + + return CenterNetResnetFeatureExtractor( + resnet_type='resnet_v2_101', + channel_means=channel_means, + channel_stds=channel_stds, + bgr_ordering=bgr_ordering + ) + + +def resnet_v2_50(channel_means, channel_stds, bgr_ordering): + """The ResNet v2 50 feature extractor.""" + + return CenterNetResnetFeatureExtractor( + resnet_type='resnet_v2_50', + channel_means=channel_means, + channel_stds=channel_stds, + bgr_ordering=bgr_ordering) diff --git a/research/object_detection/models/center_net_resnet_feature_extractor_tf2_test.py b/research/object_detection/models/center_net_resnet_feature_extractor_tf2_test.py new file mode 100644 index 000000000..3429c0442 --- /dev/null +++ b/research/object_detection/models/center_net_resnet_feature_extractor_tf2_test.py @@ -0,0 +1,54 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Testing ResNet v2 models for the CenterNet meta architecture.""" +import unittest +import numpy as np +import tensorflow.compat.v1 as tf + +from object_detection.models import center_net_resnet_feature_extractor +from object_detection.utils import test_case +from object_detection.utils import tf_version + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class CenterNetResnetFeatureExtractorTest(test_case.TestCase): + + def test_output_size(self): + """Verify that shape of features returned by the backbone is correct.""" + + model = center_net_resnet_feature_extractor.\ + CenterNetResnetFeatureExtractor('resnet_v2_101') + def graph_fn(): + img = np.zeros((8, 224, 224, 3), dtype=np.float32) + processed_img = model.preprocess(img) + return model(processed_img) + outputs = self.execute(graph_fn, []) + self.assertEqual(outputs.shape, (8, 56, 56, 64)) + + def test_output_size_resnet50(self): + """Verify that shape of features returned by the backbone is correct.""" + + model = center_net_resnet_feature_extractor.\ + CenterNetResnetFeatureExtractor('resnet_v2_50') + def graph_fn(): + img = np.zeros((8, 224, 224, 3), dtype=np.float32) + processed_img = model.preprocess(img) + return model(processed_img) + outputs = self.execute(graph_fn, []) + self.assertEqual(outputs.shape, (8, 56, 56, 64)) + + +if __name__ == '__main__': + tf.test.main() diff --git a/research/object_detection/models/center_net_resnet_v1_fpn_feature_extractor.py b/research/object_detection/models/center_net_resnet_v1_fpn_feature_extractor.py new file mode 100644 index 000000000..842e9cf1b --- /dev/null +++ b/research/object_detection/models/center_net_resnet_v1_fpn_feature_extractor.py @@ -0,0 +1,176 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Resnetv1 FPN [1] based feature extractors for CenterNet[2] meta architecture. + + +[1]: https://arxiv.org/abs/1612.03144. +[2]: https://arxiv.org/abs/1904.07850. +""" +import tensorflow.compat.v1 as tf + +from object_detection.meta_architectures.center_net_meta_arch import CenterNetFeatureExtractor + + +_RESNET_MODEL_OUTPUT_LAYERS = { + 'resnet_v1_50': ['conv2_block3_out', 'conv3_block4_out', + 'conv4_block6_out', 'conv5_block3_out'], + 'resnet_v1_101': ['conv2_block3_out', 'conv3_block4_out', + 'conv4_block23_out', 'conv5_block3_out'], +} + + +class CenterNetResnetV1FpnFeatureExtractor(CenterNetFeatureExtractor): + """Resnet v1 FPN base feature extractor for the CenterNet model. + + This feature extractor uses residual skip connections and nearest neighbor + upsampling to produce an output feature map of stride 4, which has precise + localization information along with strong semantic information from the top + of the net. 
This design does not exactly follow the original FPN design, + specifically: + - Since only one output map is necessary for heatmap prediction (stride 4 + output), the top-down feature maps can have different numbers of channels. + Specifically, the top down feature maps have the following sizes: + [h/4, w/4, 64], [h/8, w/8, 128], [h/16, w/16, 256], [h/32, w/32, 256]. + - No additional coarse features are used after conv5_x. + """ + + def __init__(self, resnet_type, channel_means=(0., 0., 0.), + channel_stds=(1., 1., 1.), bgr_ordering=False): + """Initializes the feature extractor with a specific ResNet architecture. + + Args: + resnet_type: A string specifying which kind of ResNet to use. Currently + only `resnet_v1_50` and `resnet_v1_101` are supported. + channel_means: A tuple of floats, denoting the mean of each channel + which will be subtracted from it. + channel_stds: A tuple of floats, denoting the standard deviation of each + channel. Each channel will be divided by its standard deviation value. + bgr_ordering: bool, if set will change the channel ordering to be in the + [blue, red, green] order. + + """ + + super(CenterNetResnetV1FpnFeatureExtractor, self).__init__( + channel_means=channel_means, channel_stds=channel_stds, + bgr_ordering=bgr_ordering) + if resnet_type == 'resnet_v1_50': + self._base_model = tf.keras.applications.ResNet50(weights=None) + elif resnet_type == 'resnet_v1_101': + self._base_model = tf.keras.applications.ResNet101(weights=None) + else: + raise ValueError('Unknown Resnet Model {}'.format(resnet_type)) + output_layers = _RESNET_MODEL_OUTPUT_LAYERS[resnet_type] + outputs = [self._base_model.get_layer(output_layer_name).output + for output_layer_name in output_layers] + + self._resnet_model = tf.keras.models.Model(inputs=self._base_model.input, + outputs=outputs) + resnet_outputs = self._resnet_model(self._base_model.input) + + # Construct the top-down feature maps. + top_layer = resnet_outputs[-1] + residual_op = tf.keras.layers.Conv2D(filters=256, kernel_size=1, + strides=1, padding='same') + top_down = residual_op(top_layer) + + num_filters_list = [256, 128, 64] + for i, num_filters in enumerate(num_filters_list): + level_ind = 2 - i + # Upsample. + upsample_op = tf.keras.layers.UpSampling2D(2, interpolation='nearest') + top_down = upsample_op(top_down) + + # Residual (skip-connection) from bottom-up pathway. + residual_op = tf.keras.layers.Conv2D(filters=num_filters, kernel_size=1, + strides=1, padding='same') + residual = residual_op(resnet_outputs[level_ind]) + + # Merge. + top_down = top_down + residual + next_num_filters = num_filters_list[i+1] if i + 1 <= 2 else 64 + conv = tf.keras.layers.Conv2D(filters=next_num_filters, + kernel_size=3, strides=1, padding='same') + top_down = conv(top_down) + top_down = tf.keras.layers.BatchNormalization()(top_down) + top_down = tf.keras.layers.ReLU()(top_down) + + self._feature_extractor_model = tf.keras.models.Model( + inputs=self._base_model.input, outputs=top_down) + + def preprocess(self, resized_inputs): + """Preprocess input images for the ResNet model. + + This scales images in the range [0, 255] to the range [-1, 1] + + Args: + resized_inputs: a [batch, height, width, channels] float32 tensor. + + Returns: + outputs: a [batch, height, width, channels] float32 tensor. 
+ + """ + resized_inputs = super( + CenterNetResnetV1FpnFeatureExtractor, self).preprocess(resized_inputs) + return tf.keras.applications.resnet.preprocess_input(resized_inputs) + + def load_feature_extractor_weights(self, path): + self._base_model.load_weights(path) + + def get_base_model(self): + """Get base resnet model for inspection and testing.""" + return self._base_model + + def call(self, inputs): + """Returns image features extracted by the backbone. + + Args: + inputs: An image tensor of shape [batch_size, input_height, + input_width, 3] + + Returns: + features_list: A list of length 1 containing a tensor of shape + [batch_size, input_height // 4, input_width // 4, 64] containing + the features extracted by the ResNet. + """ + return [self._feature_extractor_model(inputs)] + + @property + def num_feature_outputs(self): + return 1 + + @property + def out_stride(self): + return 4 + + +def resnet_v1_101_fpn(channel_means, channel_stds, bgr_ordering): + """The ResNet v1 101 FPN feature extractor.""" + + return CenterNetResnetV1FpnFeatureExtractor( + resnet_type='resnet_v1_101', + channel_means=channel_means, + channel_stds=channel_stds, + bgr_ordering=bgr_ordering + ) + + +def resnet_v1_50_fpn(channel_means, channel_stds, bgr_ordering): + """The ResNet v1 50 FPN feature extractor.""" + + return CenterNetResnetV1FpnFeatureExtractor( + resnet_type='resnet_v1_50', + channel_means=channel_means, + channel_stds=channel_stds, + bgr_ordering=bgr_ordering) diff --git a/research/object_detection/models/center_net_resnet_v1_fpn_feature_extractor_tf2_test.py b/research/object_detection/models/center_net_resnet_v1_fpn_feature_extractor_tf2_test.py new file mode 100644 index 000000000..3f1524904 --- /dev/null +++ b/research/object_detection/models/center_net_resnet_v1_fpn_feature_extractor_tf2_test.py @@ -0,0 +1,49 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
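Editorial aside, not part of the patch: the sketch below shows the intended end-to-end use of the new FPN extractor, mirroring the tf2 test added further down. It assumes a TF2/eager environment in which the object_detection package is importable; the shapes follow the class docstring and the test assertions.

import numpy as np
from object_detection.models import center_net_resnet_v1_fpn_feature_extractor

# Build the extractor the same way the tf2 test below does; weights are random.
extractor = (center_net_resnet_v1_fpn_feature_extractor.
             CenterNetResnetV1FpnFeatureExtractor('resnet_v1_50'))

images = np.zeros((8, 224, 224, 3), dtype=np.float32)  # float images in [0, 255]
feature_maps = extractor(extractor.preprocess(images))

# Per the call() docstring, feature_maps is a list of length 1; its only entry
# has shape [batch, height // 4, width // 4, 64], i.e. (8, 56, 56, 64) here,
# since out_stride is 4 and the last top-down level carries 64 channels.
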
+# ============================================================================== +"""Testing ResNet v1 FPN models for the CenterNet meta architecture.""" +import unittest +from absl.testing import parameterized + +import numpy as np +import tensorflow.compat.v1 as tf + +from object_detection.models import center_net_resnet_v1_fpn_feature_extractor +from object_detection.utils import test_case +from object_detection.utils import tf_version + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class CenterNetResnetV1FpnFeatureExtractorTest(test_case.TestCase, + parameterized.TestCase): + + @parameterized.parameters( + {'resnet_type': 'resnet_v1_50'}, + {'resnet_type': 'resnet_v1_101'}, + ) + def test_correct_output_size(self, resnet_type): + """Verify that shape of features returned by the backbone is correct.""" + + model = center_net_resnet_v1_fpn_feature_extractor.\ + CenterNetResnetV1FpnFeatureExtractor(resnet_type) + def graph_fn(): + img = np.zeros((8, 224, 224, 3), dtype=np.float32) + processed_img = model.preprocess(img) + return model(processed_img) + + self.assertEqual(self.execute(graph_fn, []).shape, (8, 56, 56, 64)) + + +if __name__ == '__main__': + tf.test.main() diff --git a/research/object_detection/models/embedded_ssd_mobilenet_v1_feature_extractor_test.py b/research/object_detection/models/embedded_ssd_mobilenet_v1_feature_extractor_tf1_test.py similarity index 97% rename from research/object_detection/models/embedded_ssd_mobilenet_v1_feature_extractor_test.py rename to research/object_detection/models/embedded_ssd_mobilenet_v1_feature_extractor_tf1_test.py index fd7e04544..4a27e8c8d 100644 --- a/research/object_detection/models/embedded_ssd_mobilenet_v1_feature_extractor_test.py +++ b/research/object_detection/models/embedded_ssd_mobilenet_v1_feature_extractor_tf1_test.py @@ -14,13 +14,16 @@ # ============================================================================== """Tests for embedded_ssd_mobilenet_v1_feature_extractor.""" +import unittest import numpy as np import tensorflow.compat.v1 as tf from object_detection.models import embedded_ssd_mobilenet_v1_feature_extractor from object_detection.models import ssd_feature_extractor_test +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class EmbeddedSSDMobileNetV1FeatureExtractorTest( ssd_feature_extractor_test.SsdFeatureExtractorTestBase): diff --git a/research/object_detection/models/faster_rcnn_inception_resnet_v2_feature_extractor_test.py b/research/object_detection/models/faster_rcnn_inception_resnet_v2_feature_extractor_tf1_test.py similarity index 97% rename from research/object_detection/models/faster_rcnn_inception_resnet_v2_feature_extractor_test.py rename to research/object_detection/models/faster_rcnn_inception_resnet_v2_feature_extractor_tf1_test.py index 8b5351c8f..2505fbfb3 100644 --- a/research/object_detection/models/faster_rcnn_inception_resnet_v2_feature_extractor_test.py +++ b/research/object_detection/models/faster_rcnn_inception_resnet_v2_feature_extractor_tf1_test.py @@ -14,12 +14,14 @@ # ============================================================================== """Tests for models.faster_rcnn_inception_resnet_v2_feature_extractor.""" - +import unittest import tensorflow.compat.v1 as tf from object_detection.models import faster_rcnn_inception_resnet_v2_feature_extractor as frcnn_inc_res +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class 
FasterRcnnInceptionResnetV2FeatureExtractorTest(tf.test.TestCase): def _build_feature_extractor(self, first_stage_features_stride): diff --git a/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor_test.py b/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor_tf2_test.py similarity index 67% rename from research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor_test.py rename to research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor_tf2_test.py index c8227603a..49c560457 100644 --- a/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor_test.py +++ b/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor_tf2_test.py @@ -14,12 +14,14 @@ # ============================================================================== """Tests for models.faster_rcnn_inception_resnet_v2_keras_feature_extractor.""" - +import unittest import tensorflow.compat.v1 as tf from object_detection.models import faster_rcnn_inception_resnet_v2_keras_feature_extractor as frcnn_inc_res +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class FasterRcnnInceptionResnetV2KerasFeatureExtractorTest(tf.test.TestCase): def _build_feature_extractor(self, first_stage_features_stride): @@ -38,11 +40,7 @@ class FasterRcnnInceptionResnetV2KerasFeatureExtractorTest(tf.test.TestCase): name='TestScope')(preprocessed_inputs) features_shape = tf.shape(rpn_feature_map) - init_op = tf.global_variables_initializer() - with self.test_session() as sess: - sess.run(init_op) - features_shape_out = sess.run(features_shape) - self.assertAllEqual(features_shape_out, [1, 19, 19, 1088]) + self.assertAllEqual(features_shape.numpy(), [1, 19, 19, 1088]) def test_extract_proposal_features_stride_eight(self): feature_extractor = self._build_feature_extractor( @@ -53,11 +51,7 @@ class FasterRcnnInceptionResnetV2KerasFeatureExtractorTest(tf.test.TestCase): name='TestScope')(preprocessed_inputs) features_shape = tf.shape(rpn_feature_map) - init_op = tf.global_variables_initializer() - with self.test_session() as sess: - sess.run(init_op) - features_shape_out = sess.run(features_shape) - self.assertAllEqual(features_shape_out, [1, 28, 28, 1088]) + self.assertAllEqual(features_shape.numpy(), [1, 28, 28, 1088]) def test_extract_proposal_features_half_size_input(self): feature_extractor = self._build_feature_extractor( @@ -67,25 +61,7 @@ class FasterRcnnInceptionResnetV2KerasFeatureExtractorTest(tf.test.TestCase): rpn_feature_map = feature_extractor.get_proposal_feature_extractor_model( name='TestScope')(preprocessed_inputs) features_shape = tf.shape(rpn_feature_map) - - init_op = tf.global_variables_initializer() - with self.test_session() as sess: - sess.run(init_op) - features_shape_out = sess.run(features_shape) - self.assertAllEqual(features_shape_out, [1, 7, 7, 1088]) - - def test_extract_proposal_features_dies_on_invalid_stride(self): - with self.assertRaises(ValueError): - self._build_feature_extractor(first_stage_features_stride=99) - - def test_extract_proposal_features_dies_with_incorrect_rank_inputs(self): - feature_extractor = self._build_feature_extractor( - first_stage_features_stride=16) - preprocessed_inputs = tf.random_uniform( - [224, 224, 3], maxval=255, dtype=tf.float32) - with self.assertRaises(ValueError): - feature_extractor.get_proposal_feature_extractor_model( - 
name='TestScope')(preprocessed_inputs) + self.assertAllEqual(features_shape.numpy(), [1, 7, 7, 1088]) def test_extract_box_classifier_features_returns_expected_size(self): feature_extractor = self._build_feature_extractor( @@ -97,12 +73,7 @@ class FasterRcnnInceptionResnetV2KerasFeatureExtractorTest(tf.test.TestCase): proposal_classifier_features = ( model(proposal_feature_maps)) features_shape = tf.shape(proposal_classifier_features) - - init_op = tf.global_variables_initializer() - with self.test_session() as sess: - sess.run(init_op) - features_shape_out = sess.run(features_shape) - self.assertAllEqual(features_shape_out, [2, 8, 8, 1536]) + self.assertAllEqual(features_shape.numpy(), [2, 8, 8, 1536]) if __name__ == '__main__': diff --git a/research/object_detection/models/faster_rcnn_inception_v2_feature_extractor_test.py b/research/object_detection/models/faster_rcnn_inception_v2_feature_extractor_tf1_test.py similarity index 97% rename from research/object_detection/models/faster_rcnn_inception_v2_feature_extractor_test.py rename to research/object_detection/models/faster_rcnn_inception_v2_feature_extractor_tf1_test.py index 600c699c8..f5d01145f 100644 --- a/research/object_detection/models/faster_rcnn_inception_v2_feature_extractor_test.py +++ b/research/object_detection/models/faster_rcnn_inception_v2_feature_extractor_tf1_test.py @@ -14,13 +14,15 @@ # ============================================================================== """Tests for faster_rcnn_inception_v2_feature_extractor.""" - +import unittest import numpy as np import tensorflow.compat.v1 as tf from object_detection.models import faster_rcnn_inception_v2_feature_extractor as faster_rcnn_inception_v2 +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class FasterRcnnInceptionV2FeatureExtractorTest(tf.test.TestCase): def _build_feature_extractor(self, first_stage_features_stride): diff --git a/research/object_detection/models/faster_rcnn_mobilenet_v1_feature_extractor_test.py b/research/object_detection/models/faster_rcnn_mobilenet_v1_feature_extractor_tf1_test.py similarity index 97% rename from research/object_detection/models/faster_rcnn_mobilenet_v1_feature_extractor_test.py rename to research/object_detection/models/faster_rcnn_mobilenet_v1_feature_extractor_tf1_test.py index 39d6d234d..65a4958e4 100644 --- a/research/object_detection/models/faster_rcnn_mobilenet_v1_feature_extractor_test.py +++ b/research/object_detection/models/faster_rcnn_mobilenet_v1_feature_extractor_tf1_test.py @@ -14,13 +14,15 @@ # ============================================================================== """Tests for faster_rcnn_mobilenet_v1_feature_extractor.""" - +import unittest import numpy as np import tensorflow.compat.v1 as tf from object_detection.models import faster_rcnn_mobilenet_v1_feature_extractor as faster_rcnn_mobilenet_v1 +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class FasterRcnnMobilenetV1FeatureExtractorTest(tf.test.TestCase): def _build_feature_extractor(self, first_stage_features_stride): diff --git a/research/object_detection/models/faster_rcnn_nas_feature_extractor.py b/research/object_detection/models/faster_rcnn_nas_feature_extractor.py index b1f5e1e6e..9fe17cbea 100644 --- a/research/object_detection/models/faster_rcnn_nas_feature_extractor.py +++ b/research/object_detection/models/faster_rcnn_nas_feature_extractor.py @@ -31,8 +31,14 @@ import tf_slim as slim from 
object_detection.meta_architectures import faster_rcnn_meta_arch from object_detection.utils import variables_helper -from nets.nasnet import nasnet -from nets.nasnet import nasnet_utils + +# pylint: disable=g-import-not-at-top +try: + from nets.nasnet import nasnet + from nets.nasnet import nasnet_utils +except: # pylint: disable=bare-except + pass +# pylint: enable=g-import-not-at-top arg_scope = slim.arg_scope diff --git a/research/object_detection/models/faster_rcnn_nas_feature_extractor_test.py b/research/object_detection/models/faster_rcnn_nas_feature_extractor_tf1_test.py similarity index 97% rename from research/object_detection/models/faster_rcnn_nas_feature_extractor_test.py rename to research/object_detection/models/faster_rcnn_nas_feature_extractor_tf1_test.py index 4f7e5bed9..a41cb0f73 100644 --- a/research/object_detection/models/faster_rcnn_nas_feature_extractor_test.py +++ b/research/object_detection/models/faster_rcnn_nas_feature_extractor_tf1_test.py @@ -14,12 +14,14 @@ # ============================================================================== """Tests for models.faster_rcnn_nas_feature_extractor.""" - +import unittest import tensorflow.compat.v1 as tf from object_detection.models import faster_rcnn_nas_feature_extractor as frcnn_nas +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class FasterRcnnNASFeatureExtractorTest(tf.test.TestCase): def _build_feature_extractor(self, first_stage_features_stride): diff --git a/research/object_detection/models/faster_rcnn_pnas_feature_extractor.py b/research/object_detection/models/faster_rcnn_pnas_feature_extractor.py index 7f4ff7e8f..ec32cd309 100644 --- a/research/object_detection/models/faster_rcnn_pnas_feature_extractor.py +++ b/research/object_detection/models/faster_rcnn_pnas_feature_extractor.py @@ -30,7 +30,11 @@ import tf_slim as slim from object_detection.meta_architectures import faster_rcnn_meta_arch from object_detection.utils import variables_helper from nets.nasnet import nasnet_utils -from nets.nasnet import pnasnet + +try: + from nets.nasnet import pnasnet # pylint: disable=g-import-not-at-top +except: # pylint: disable=bare-except + pass arg_scope = slim.arg_scope diff --git a/research/object_detection/models/faster_rcnn_pnas_feature_extractor_test.py b/research/object_detection/models/faster_rcnn_pnas_feature_extractor_tf1_test.py similarity index 97% rename from research/object_detection/models/faster_rcnn_pnas_feature_extractor_test.py rename to research/object_detection/models/faster_rcnn_pnas_feature_extractor_tf1_test.py index 46b822fd2..16774511b 100644 --- a/research/object_detection/models/faster_rcnn_pnas_feature_extractor_test.py +++ b/research/object_detection/models/faster_rcnn_pnas_feature_extractor_tf1_test.py @@ -14,12 +14,14 @@ # ============================================================================== """Tests for models.faster_rcnn_pnas_feature_extractor.""" - +import unittest import tensorflow.compat.v1 as tf from object_detection.models import faster_rcnn_pnas_feature_extractor as frcnn_pnas +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class FasterRcnnPNASFeatureExtractorTest(tf.test.TestCase): def _build_feature_extractor(self, first_stage_features_stride): diff --git a/research/object_detection/models/faster_rcnn_resnet_keras_feature_extractor.py b/research/object_detection/models/faster_rcnn_resnet_keras_feature_extractor.py new file mode 100644 
index 000000000..a2029d242
--- /dev/null
+++ b/research/object_detection/models/faster_rcnn_resnet_keras_feature_extractor.py
@@ -0,0 +1,271 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+"""Resnet based Faster R-CNN implementation in Keras.
+
+See Deep Residual Learning for Image Recognition by He et al.
+https://arxiv.org/abs/1512.03385
+"""
+
+import tensorflow.compat.v1 as tf
+
+from object_detection.meta_architectures import faster_rcnn_meta_arch
+from object_detection.models.keras_models import resnet_v1
+from object_detection.utils import model_util
+
+
+_RESNET_MODEL_CONV4_LAST_LAYERS = {
+    'resnet_v1_50': 'conv4_block6_out',
+    'resnet_v1_101': 'conv4_block23_out',
+    'resnet_v1_152': 'conv4_block36_out',
+}
+
+
+class FasterRCNNResnetKerasFeatureExtractor(
+    faster_rcnn_meta_arch.FasterRCNNKerasFeatureExtractor):
+  """Faster R-CNN with Resnet feature extractor implementation."""
+
+  def __init__(self,
+               is_training,
+               resnet_v1_base_model,
+               resnet_v1_base_model_name,
+               first_stage_features_stride=16,
+               batch_norm_trainable=False,
+               weight_decay=0.0):
+    """Constructor.
+
+    Args:
+      is_training: See base class.
+      resnet_v1_base_model: base resnet v1 network to use. One of
+        the resnet_v1.resnet_v1_{50,101,152} models.
+      resnet_v1_base_model_name: model name under which to construct resnet v1.
+      first_stage_features_stride: See base class.
+      batch_norm_trainable: See base class.
+      weight_decay: See base class.
+
+    Raises:
+      ValueError: If `first_stage_features_stride` is not 16.
+    """
+    if first_stage_features_stride != 16:
+      raise ValueError('`first_stage_features_stride` must be 16.')
+    super(FasterRCNNResnetKerasFeatureExtractor, self).__init__(
+        is_training, first_stage_features_stride, batch_norm_trainable,
+        weight_decay)
+    self.classification_backbone = None
+    self._variable_dict = {}
+    self._resnet_v1_base_model = resnet_v1_base_model
+    self._resnet_v1_base_model_name = resnet_v1_base_model_name
+
+  def preprocess(self, resized_inputs):
+    """Faster R-CNN Resnet V1 preprocessing.
+
+    VGG style channel mean subtraction as described here:
+    https://gist.github.com/ksimonyan/211839e770f7b538e2d8#file-readme-md
+    Note that if the number of channels is not equal to 3, the mean subtraction
+    will be skipped and the original resized_inputs will be returned.
+
+    Args:
+      resized_inputs: A [batch, height_in, width_in, channels] float32 tensor
+        representing a batch of images with values between 0 and 255.0.
+
+    Returns:
+      preprocessed_inputs: A [batch, height_out, width_out, channels] float32
+        tensor representing a batch of images.
+
+    """
+    if resized_inputs.shape.as_list()[3] == 3:
+      channel_means = [123.68, 116.779, 103.939]
+      return resized_inputs - [[channel_means]]
+    else:
+      return resized_inputs
+
+  def get_proposal_feature_extractor_model(self, name=None):
+    """Returns a model that extracts first stage RPN features.
+ + Extracts features using the first half of the Resnet v1 network. + + Args: + name: A scope name to construct all variables within. + + Returns: + A Keras model that takes preprocessed_inputs: + A [batch, height, width, channels] float32 tensor + representing a batch of images. + + And returns rpn_feature_map: + A tensor with shape [batch, height, width, depth] + """ + if not self.classification_backbone: + self.classification_backbone = self._resnet_v1_base_model( + batchnorm_training=self._train_batch_norm, + conv_hyperparams=None, + weight_decay=self._weight_decay, + classes=None, + weights=None, + include_top=False + ) + with tf.name_scope(name): + with tf.name_scope('ResnetV1'): + + conv4_last_layer = _RESNET_MODEL_CONV4_LAST_LAYERS[ + self._resnet_v1_base_model_name] + proposal_features = self.classification_backbone.get_layer( + name=conv4_last_layer).output + keras_model = tf.keras.Model( + inputs=self.classification_backbone.inputs, + outputs=proposal_features) + for variable in keras_model.variables: + self._variable_dict[variable.name[:-2]] = variable + return keras_model + + def get_box_classifier_feature_extractor_model(self, name=None): + """Returns a model that extracts second stage box classifier features. + + This function reconstructs the "second half" of the ResNet v1 + network after the part defined in `get_proposal_feature_extractor_model`. + + Args: + name: A scope name to construct all variables within. + + Returns: + A Keras model that takes proposal_feature_maps: + A 4-D float tensor with shape + [batch_size * self.max_num_proposals, crop_height, crop_width, depth] + representing the feature map cropped to each proposal. + And returns proposal_classifier_features: + A 4-D float tensor with shape + [batch_size * self.max_num_proposals, height, width, depth] + representing box classifier features for each proposal. + """ + if not self.classification_backbone: + self.classification_backbone = self._resnet_v1_base_model( + batchnorm_training=self._train_batch_norm, + conv_hyperparams=None, + weight_decay=self._weight_decay, + classes=None, + weights=None, + include_top=False + ) + with tf.name_scope(name): + with tf.name_scope('ResnetV1'): + conv4_last_layer = _RESNET_MODEL_CONV4_LAST_LAYERS[ + self._resnet_v1_base_model_name] + proposal_feature_maps = self.classification_backbone.get_layer( + name=conv4_last_layer).output + proposal_classifier_features = self.classification_backbone.get_layer( + name='conv5_block3_out').output + + keras_model = model_util.extract_submodel( + model=self.classification_backbone, + inputs=proposal_feature_maps, + outputs=proposal_classifier_features) + for variable in keras_model.variables: + self._variable_dict[variable.name[:-2]] = variable + return keras_model + + def restore_from_classification_checkpoint_fn( + self, + first_stage_feature_extractor_scope, + second_stage_feature_extractor_scope): + """Returns a map for restoring from an (object-based) checkpoint. + + Args: + first_stage_feature_extractor_scope: A scope name for the first stage + feature extractor (unused). + second_stage_feature_extractor_scope: A scope name for the second stage + feature extractor (unused). 
+ + Returns: + A dict mapping keys to Keras models + """ + return {'feature_extractor': self.classification_backbone} + + +class FasterRCNNResnet50KerasFeatureExtractor( + FasterRCNNResnetKerasFeatureExtractor): + """Faster R-CNN with Resnet50 feature extractor implementation.""" + + def __init__(self, + is_training, + first_stage_features_stride=16, + batch_norm_trainable=False, + weight_decay=0.0): + """Constructor. + + Args: + is_training: See base class. + first_stage_features_stride: See base class. + batch_norm_trainable: See base class. + weight_decay: See base class. + """ + super(FasterRCNNResnet50KerasFeatureExtractor, self).__init__( + is_training=is_training, + resnet_v1_base_model=resnet_v1.resnet_v1_50, + resnet_v1_base_model_name='resnet_v1_50', + first_stage_features_stride=first_stage_features_stride, + batch_norm_trainable=batch_norm_trainable, + weight_decay=weight_decay) + + +class FasterRCNNResnet101KerasFeatureExtractor( + FasterRCNNResnetKerasFeatureExtractor): + """Faster R-CNN with Resnet101 feature extractor implementation.""" + + def __init__(self, + is_training, + first_stage_features_stride=16, + batch_norm_trainable=False, + weight_decay=0.0): + """Constructor. + + Args: + is_training: See base class. + first_stage_features_stride: See base class. + batch_norm_trainable: See base class. + weight_decay: See base class. + """ + super(FasterRCNNResnet101KerasFeatureExtractor, self).__init__( + is_training=is_training, + resnet_v1_base_model=resnet_v1.resnet_v1_101, + resnet_v1_base_model_name='resnet_v1_101', + first_stage_features_stride=first_stage_features_stride, + batch_norm_trainable=batch_norm_trainable, + weight_decay=weight_decay) + + +class FasterRCNNResnet152KerasFeatureExtractor( + FasterRCNNResnetKerasFeatureExtractor): + """Faster R-CNN with Resnet152 feature extractor implementation.""" + + def __init__(self, + is_training, + first_stage_features_stride=16, + batch_norm_trainable=False, + weight_decay=0.0): + """Constructor. + + Args: + is_training: See base class. + first_stage_features_stride: See base class. + batch_norm_trainable: See base class. + weight_decay: See base class. + """ + super(FasterRCNNResnet152KerasFeatureExtractor, self).__init__( + is_training=is_training, + resnet_v1_base_model=resnet_v1.resnet_v1_152, + resnet_v1_base_model_name='resnet_v1_152', + first_stage_features_stride=first_stage_features_stride, + batch_norm_trainable=batch_norm_trainable, + weight_decay=weight_decay) diff --git a/research/object_detection/models/faster_rcnn_resnet_keras_feature_extractor_tf2_test.py b/research/object_detection/models/faster_rcnn_resnet_keras_feature_extractor_tf2_test.py new file mode 100644 index 000000000..15e8a5fbf --- /dev/null +++ b/research/object_detection/models/faster_rcnn_resnet_keras_feature_extractor_tf2_test.py @@ -0,0 +1,80 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
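Editorial aside, not part of the patch: the new Keras extractor exposes two submodels, one producing stride-16 RPN features from images and one producing box-classifier features from cropped proposals. The sketch below mirrors the shapes asserted in the tf2 test that follows; it assumes a TF2/eager environment, and the scope name is the same placeholder the test uses.

import tensorflow as tf
from object_detection.models import faster_rcnn_resnet_keras_feature_extractor as frcnn_res

extractor = frcnn_res.FasterRCNNResnet50KerasFeatureExtractor(
    is_training=False,
    first_stage_features_stride=16,
    batch_norm_trainable=False,
    weight_decay=0.0)

# First stage: images -> stride-16 conv4 features.
images = tf.random.uniform([1, 224, 224, 3], maxval=255, dtype=tf.float32)
rpn_model = extractor.get_proposal_feature_extractor_model(name='TestScope')
rpn_features = rpn_model(extractor.preprocess(images))
# rpn_features has shape [1, 14, 14, 1024] for a 224x224 input (the test below
# feeds raw random values; preprocess only shifts the channel means).

# Second stage: cropped proposal features -> box-classifier features.
crops = tf.random.uniform([3, 7, 7, 1024], dtype=tf.float32)
box_model = extractor.get_box_classifier_feature_extractor_model(name='TestScope')
box_features = box_model(crops)
# box_features has shape [3, 4, 4, 2048], matching the assertion in the test.
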
+# ============================================================================== + +"""Tests for models.faster_rcnn_resnet_keras_feature_extractor.""" +import unittest +import tensorflow.compat.v1 as tf + +from object_detection.models import faster_rcnn_resnet_keras_feature_extractor as frcnn_res +from object_detection.utils import tf_version + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class FasterRcnnResnetKerasFeatureExtractorTest(tf.test.TestCase): + + def _build_feature_extractor(self, architecture='resnet_v1_50'): + return frcnn_res.FasterRCNNResnet50KerasFeatureExtractor( + is_training=False, + first_stage_features_stride=16, + batch_norm_trainable=False, + weight_decay=0.0) + + def test_extract_proposal_features_returns_expected_size(self): + feature_extractor = self._build_feature_extractor() + preprocessed_inputs = tf.random_uniform( + [1, 224, 224, 3], maxval=255, dtype=tf.float32) + rpn_feature_map = feature_extractor.get_proposal_feature_extractor_model( + name='TestScope')(preprocessed_inputs) + features_shape = tf.shape(rpn_feature_map) + self.assertAllEqual(features_shape.numpy(), [1, 14, 14, 1024]) + + def test_extract_proposal_features_half_size_input(self): + feature_extractor = self._build_feature_extractor() + preprocessed_inputs = tf.random_uniform( + [1, 112, 112, 3], maxval=255, dtype=tf.float32) + rpn_feature_map = feature_extractor.get_proposal_feature_extractor_model( + name='TestScope')(preprocessed_inputs) + features_shape = tf.shape(rpn_feature_map) + self.assertAllEqual(features_shape.numpy(), [1, 7, 7, 1024]) + + def test_extract_proposal_features_dies_with_incorrect_rank_inputs(self): + feature_extractor = self._build_feature_extractor() + preprocessed_inputs = tf.random_uniform( + [224, 224, 3], maxval=255, dtype=tf.float32) + with self.assertRaises(tf.errors.InvalidArgumentError): + feature_extractor.get_proposal_feature_extractor_model( + name='TestScope')(preprocessed_inputs) + + def test_extract_box_classifier_features_returns_expected_size(self): + feature_extractor = self._build_feature_extractor() + proposal_feature_maps = tf.random_uniform( + [3, 7, 7, 1024], maxval=255, dtype=tf.float32) + model = feature_extractor.get_box_classifier_feature_extractor_model( + name='TestScope') + proposal_classifier_features = ( + model(proposal_feature_maps)) + features_shape = tf.shape(proposal_classifier_features) + # Note: due to a slight mismatch in slim and keras resnet definitions + # the output shape of the box classifier is slightly different compared to + # that of the slim implementation. The keras version is more `canonical` + # in that it more accurately reflects the original authors' implementation. + # TODO(jonathanhuang): make the output shape match that of the slim + # implementation by using atrous convolutions. 
+ self.assertAllEqual(features_shape.numpy(), [3, 4, 4, 2048]) + + +if __name__ == '__main__': + tf.enable_v2_behavior() + tf.test.main() diff --git a/research/object_detection/models/faster_rcnn_resnet_v1_feature_extractor_test.py b/research/object_detection/models/faster_rcnn_resnet_v1_feature_extractor_tf1_test.py similarity index 98% rename from research/object_detection/models/faster_rcnn_resnet_v1_feature_extractor_test.py rename to research/object_detection/models/faster_rcnn_resnet_v1_feature_extractor_tf1_test.py index 0b5055a0c..3d47da04a 100644 --- a/research/object_detection/models/faster_rcnn_resnet_v1_feature_extractor_test.py +++ b/research/object_detection/models/faster_rcnn_resnet_v1_feature_extractor_tf1_test.py @@ -14,13 +14,15 @@ # ============================================================================== """Tests for object_detection.models.faster_rcnn_resnet_v1_feature_extractor.""" - +import unittest import numpy as np import tensorflow.compat.v1 as tf from object_detection.models import faster_rcnn_resnet_v1_feature_extractor as faster_rcnn_resnet_v1 +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class FasterRcnnResnetV1FeatureExtractorTest(tf.test.TestCase): def _build_feature_extractor(self, diff --git a/research/object_detection/models/feature_map_generators_test.py b/research/object_detection/models/feature_map_generators_test.py index 49ba09bdf..951e7760b 100644 --- a/research/object_detection/models/feature_map_generators_test.py +++ b/research/object_detection/models/feature_map_generators_test.py @@ -14,7 +14,7 @@ # ============================================================================== """Tests for feature map generators.""" - +import unittest from absl.testing import parameterized import numpy as np @@ -25,6 +25,9 @@ from google.protobuf import text_format from object_detection.builders import hyperparams_builder from object_detection.models import feature_map_generators from object_detection.protos import hyperparams_pb2 +from object_detection.utils import test_case +from object_detection.utils import test_utils +from object_detection.utils import tf_version INCEPTION_V2_LAYOUT = { 'from_layer': ['Mixed_3c', 'Mixed_4c', 'Mixed_5c', '', '', ''], @@ -52,11 +55,7 @@ SSD_MOBILENET_V1_WEIGHT_SHARED_LAYOUT = { } -@parameterized.parameters( - {'use_keras': False}, - {'use_keras': True}, -) -class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase): +class MultiResolutionFeatureMapGeneratorTest(test_case.TestCase): def _build_conv_hyperparams(self): conv_hyperparams = hyperparams_pb2.Hyperparams() @@ -73,9 +72,9 @@ class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase): text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams) return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams) - def _build_feature_map_generator(self, feature_map_layout, use_keras, + def _build_feature_map_generator(self, feature_map_layout, pool_residual=False): - if use_keras: + if tf_version.is_tf2(): return feature_map_generators.KerasMultiResolutionFeatureMaps( feature_map_layout=feature_map_layout, depth_multiplier=1, @@ -97,17 +96,18 @@ class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase): pool_residual=pool_residual) return feature_map_generator - def test_get_expected_feature_map_shapes_with_inception_v2(self, use_keras): - image_features = { - 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32), - 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], 
dtype=tf.float32), - 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32) - } - feature_map_generator = self._build_feature_map_generator( - feature_map_layout=INCEPTION_V2_LAYOUT, - use_keras=use_keras - ) - feature_maps = feature_map_generator(image_features) + def test_get_expected_feature_map_shapes_with_inception_v2(self): + with test_utils.GraphContextOrNone() as g: + image_features = { + 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32), + 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32), + 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32) + } + feature_map_generator = self._build_feature_map_generator( + feature_map_layout=INCEPTION_V2_LAYOUT) + def graph_fn(): + feature_maps = feature_map_generator(image_features) + return feature_maps expected_feature_map_shapes = { 'Mixed_3c': (4, 28, 28, 256), @@ -116,29 +116,25 @@ class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase): 'Mixed_5c_2_Conv2d_3_3x3_s2_512': (4, 4, 4, 512), 'Mixed_5c_2_Conv2d_4_3x3_s2_256': (4, 2, 2, 256), 'Mixed_5c_2_Conv2d_5_3x3_s2_256': (4, 1, 1, 256)} - - init_op = tf.global_variables_initializer() - with self.test_session() as sess: - sess.run(init_op) - out_feature_maps = sess.run(feature_maps) - out_feature_map_shapes = dict( - (key, value.shape) for key, value in out_feature_maps.items()) - self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes) + out_feature_maps = self.execute(graph_fn, [], g) + out_feature_map_shapes = dict( + (key, value.shape) for key, value in out_feature_maps.items()) + self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes) def test_get_expected_feature_map_shapes_with_inception_v2_use_depthwise( - self, use_keras): - image_features = { - 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32), - 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32), - 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32) - } - layout_copy = INCEPTION_V2_LAYOUT.copy() - layout_copy['use_depthwise'] = True - feature_map_generator = self._build_feature_map_generator( - feature_map_layout=layout_copy, - use_keras=use_keras - ) - feature_maps = feature_map_generator(image_features) + self): + with test_utils.GraphContextOrNone() as g: + image_features = { + 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32), + 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32), + 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32) + } + layout_copy = INCEPTION_V2_LAYOUT.copy() + layout_copy['use_depthwise'] = True + feature_map_generator = self._build_feature_map_generator( + feature_map_layout=layout_copy) + def graph_fn(): + return feature_map_generator(image_features) expected_feature_map_shapes = { 'Mixed_3c': (4, 28, 28, 256), @@ -147,29 +143,25 @@ class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase): 'Mixed_5c_2_Conv2d_3_3x3_s2_512': (4, 4, 4, 512), 'Mixed_5c_2_Conv2d_4_3x3_s2_256': (4, 2, 2, 256), 'Mixed_5c_2_Conv2d_5_3x3_s2_256': (4, 1, 1, 256)} - - init_op = tf.global_variables_initializer() - with self.test_session() as sess: - sess.run(init_op) - out_feature_maps = sess.run(feature_maps) - out_feature_map_shapes = dict( - (key, value.shape) for key, value in out_feature_maps.items()) - self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes) - - def test_get_expected_feature_map_shapes_use_explicit_padding( - self, use_keras): - image_features = { - 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], 
dtype=tf.float32), - 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32), - 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32) - } - layout_copy = INCEPTION_V2_LAYOUT.copy() - layout_copy['use_explicit_padding'] = True - feature_map_generator = self._build_feature_map_generator( - feature_map_layout=layout_copy, - use_keras=use_keras - ) - feature_maps = feature_map_generator(image_features) + out_feature_maps = self.execute(graph_fn, [], g) + out_feature_map_shapes = dict( + (key, value.shape) for key, value in out_feature_maps.items()) + self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes) + + def test_get_expected_feature_map_shapes_use_explicit_padding(self): + with test_utils.GraphContextOrNone() as g: + image_features = { + 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32), + 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32), + 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32) + } + layout_copy = INCEPTION_V2_LAYOUT.copy() + layout_copy['use_explicit_padding'] = True + feature_map_generator = self._build_feature_map_generator( + feature_map_layout=layout_copy, + ) + def graph_fn(): + return feature_map_generator(image_features) expected_feature_map_shapes = { 'Mixed_3c': (4, 28, 28, 256), @@ -178,27 +170,24 @@ class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase): 'Mixed_5c_2_Conv2d_3_3x3_s2_512': (4, 4, 4, 512), 'Mixed_5c_2_Conv2d_4_3x3_s2_256': (4, 2, 2, 256), 'Mixed_5c_2_Conv2d_5_3x3_s2_256': (4, 1, 1, 256)} + out_feature_maps = self.execute(graph_fn, [], g) + out_feature_map_shapes = dict( + (key, value.shape) for key, value in out_feature_maps.items()) + self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes) + + def test_get_expected_feature_map_shapes_with_inception_v3(self): + with test_utils.GraphContextOrNone() as g: + image_features = { + 'Mixed_5d': tf.random_uniform([4, 35, 35, 256], dtype=tf.float32), + 'Mixed_6e': tf.random_uniform([4, 17, 17, 576], dtype=tf.float32), + 'Mixed_7c': tf.random_uniform([4, 8, 8, 1024], dtype=tf.float32) + } - init_op = tf.global_variables_initializer() - with self.test_session() as sess: - sess.run(init_op) - out_feature_maps = sess.run(feature_maps) - out_feature_map_shapes = dict( - (key, value.shape) for key, value in out_feature_maps.items()) - self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes) - - def test_get_expected_feature_map_shapes_with_inception_v3(self, use_keras): - image_features = { - 'Mixed_5d': tf.random_uniform([4, 35, 35, 256], dtype=tf.float32), - 'Mixed_6e': tf.random_uniform([4, 17, 17, 576], dtype=tf.float32), - 'Mixed_7c': tf.random_uniform([4, 8, 8, 1024], dtype=tf.float32) - } - - feature_map_generator = self._build_feature_map_generator( - feature_map_layout=INCEPTION_V3_LAYOUT, - use_keras=use_keras - ) - feature_maps = feature_map_generator(image_features) + feature_map_generator = self._build_feature_map_generator( + feature_map_layout=INCEPTION_V3_LAYOUT, + ) + def graph_fn(): + return feature_map_generator(image_features) expected_feature_map_shapes = { 'Mixed_5d': (4, 35, 35, 256), @@ -207,29 +196,26 @@ class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase): 'Mixed_7c_2_Conv2d_3_3x3_s2_512': (4, 4, 4, 512), 'Mixed_7c_2_Conv2d_4_3x3_s2_256': (4, 2, 2, 256), 'Mixed_7c_2_Conv2d_5_3x3_s2_128': (4, 1, 1, 128)} - - init_op = tf.global_variables_initializer() - with self.test_session() as sess: - sess.run(init_op) - out_feature_maps = sess.run(feature_maps) - 
out_feature_map_shapes = dict( - (key, value.shape) for key, value in out_feature_maps.items()) - self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes) + out_feature_maps = self.execute(graph_fn, [], g) + out_feature_map_shapes = dict( + (key, value.shape) for key, value in out_feature_maps.items()) + self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes) def test_get_expected_feature_map_shapes_with_embedded_ssd_mobilenet_v1( - self, use_keras): - image_features = { - 'Conv2d_11_pointwise': tf.random_uniform([4, 16, 16, 512], - dtype=tf.float32), - 'Conv2d_13_pointwise': tf.random_uniform([4, 8, 8, 1024], - dtype=tf.float32), - } + self): + with test_utils.GraphContextOrNone() as g: + image_features = { + 'Conv2d_11_pointwise': tf.random_uniform([4, 16, 16, 512], + dtype=tf.float32), + 'Conv2d_13_pointwise': tf.random_uniform([4, 8, 8, 1024], + dtype=tf.float32), + } - feature_map_generator = self._build_feature_map_generator( - feature_map_layout=EMBEDDED_SSD_MOBILENET_V1_LAYOUT, - use_keras=use_keras - ) - feature_maps = feature_map_generator(image_features) + feature_map_generator = self._build_feature_map_generator( + feature_map_layout=EMBEDDED_SSD_MOBILENET_V1_LAYOUT, + ) + def graph_fn(): + return feature_map_generator(image_features) expected_feature_map_shapes = { 'Conv2d_11_pointwise': (4, 16, 16, 512), @@ -237,55 +223,50 @@ class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase): 'Conv2d_13_pointwise_2_Conv2d_2_3x3_s2_512': (4, 4, 4, 512), 'Conv2d_13_pointwise_2_Conv2d_3_3x3_s2_256': (4, 2, 2, 256), 'Conv2d_13_pointwise_2_Conv2d_4_2x2_s2_256': (4, 1, 1, 256)} - - init_op = tf.global_variables_initializer() - with self.test_session() as sess: - sess.run(init_op) - out_feature_maps = sess.run(feature_maps) - out_feature_map_shapes = dict( - (key, value.shape) for key, value in out_feature_maps.items()) - self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes) + out_feature_maps = self.execute(graph_fn, [], g) + out_feature_map_shapes = dict( + (key, value.shape) for key, value in out_feature_maps.items()) + self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes) def test_feature_map_shapes_with_pool_residual_ssd_mobilenet_v1( - self, use_keras): - image_features = { - 'Conv2d_13_pointwise': tf.random_uniform([4, 8, 8, 1024], - dtype=tf.float32), - } + self): + with test_utils.GraphContextOrNone() as g: + image_features = { + 'Conv2d_13_pointwise': tf.random_uniform([4, 8, 8, 1024], + dtype=tf.float32), + } - feature_map_generator = self._build_feature_map_generator( - feature_map_layout=SSD_MOBILENET_V1_WEIGHT_SHARED_LAYOUT, - use_keras=use_keras, - pool_residual=True - ) - feature_maps = feature_map_generator(image_features) + feature_map_generator = self._build_feature_map_generator( + feature_map_layout=SSD_MOBILENET_V1_WEIGHT_SHARED_LAYOUT, + pool_residual=True + ) + def graph_fn(): + return feature_map_generator(image_features) expected_feature_map_shapes = { 'Conv2d_13_pointwise': (4, 8, 8, 1024), 'Conv2d_13_pointwise_2_Conv2d_1_3x3_s2_256': (4, 4, 4, 256), 'Conv2d_13_pointwise_2_Conv2d_2_3x3_s2_256': (4, 2, 2, 256), 'Conv2d_13_pointwise_2_Conv2d_3_3x3_s2_256': (4, 1, 1, 256)} + out_feature_maps = self.execute(graph_fn, [], g) + out_feature_map_shapes = dict( + (key, value.shape) for key, value in out_feature_maps.items()) + self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes) + + def test_get_expected_variable_names_with_inception_v2(self): + with 
test_utils.GraphContextOrNone() as g: + image_features = { + 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32), + 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32), + 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32) + } + feature_map_generator = self._build_feature_map_generator( + feature_map_layout=INCEPTION_V2_LAYOUT, + ) + def graph_fn(): + return feature_map_generator(image_features) - init_op = tf.global_variables_initializer() - with self.test_session() as sess: - sess.run(init_op) - out_feature_maps = sess.run(feature_maps) - out_feature_map_shapes = dict( - (key, value.shape) for key, value in out_feature_maps.items()) - self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes) - - def test_get_expected_variable_names_with_inception_v2(self, use_keras): - image_features = { - 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32), - 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32), - 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32) - } - feature_map_generator = self._build_feature_map_generator( - feature_map_layout=INCEPTION_V2_LAYOUT, - use_keras=use_keras - ) - feature_maps = feature_map_generator(image_features) - + self.execute(graph_fn, [], g) expected_slim_variables = set([ 'Mixed_5c_1_Conv2d_3_1x1_256/weights', 'Mixed_5c_1_Conv2d_3_1x1_256/biases', @@ -316,32 +297,32 @@ class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase): 'FeatureMaps/Mixed_5c_2_Conv2d_5_3x3_s2_256_conv/bias', ]) - init_op = tf.global_variables_initializer() - with self.test_session() as sess: - sess.run(init_op) - sess.run(feature_maps) + if tf_version.is_tf2(): actual_variable_set = set( - [var.op.name for var in tf.trainable_variables()]) - if use_keras: - self.assertSetEqual(expected_keras_variables, actual_variable_set) - else: - self.assertSetEqual(expected_slim_variables, actual_variable_set) + [var.name.split(':')[0] for var in feature_map_generator.variables]) + self.assertSetEqual(expected_keras_variables, actual_variable_set) + else: + with g.as_default(): + actual_variable_set = set( + [var.op.name for var in tf.trainable_variables()]) + self.assertSetEqual(expected_slim_variables, actual_variable_set) def test_get_expected_variable_names_with_inception_v2_use_depthwise( - self, - use_keras): - image_features = { - 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32), - 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32), - 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32) - } - layout_copy = INCEPTION_V2_LAYOUT.copy() - layout_copy['use_depthwise'] = True - feature_map_generator = self._build_feature_map_generator( - feature_map_layout=layout_copy, - use_keras=use_keras - ) - feature_maps = feature_map_generator(image_features) + self): + with test_utils.GraphContextOrNone() as g: + image_features = { + 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32), + 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32), + 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32) + } + layout_copy = INCEPTION_V2_LAYOUT.copy() + layout_copy['use_depthwise'] = True + feature_map_generator = self._build_feature_map_generator( + feature_map_layout=layout_copy, + ) + def graph_fn(): + return feature_map_generator(image_features) + self.execute(graph_fn, [], g) expected_slim_variables = set([ 'Mixed_5c_1_Conv2d_3_1x1_256/weights', @@ -391,23 +372,20 @@ class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase): 
'FeatureMaps/Mixed_5c_2_Conv2d_5_3x3_s2_256_conv/bias', ]) - init_op = tf.global_variables_initializer() - with self.test_session() as sess: - sess.run(init_op) - sess.run(feature_maps) + if tf_version.is_tf2(): actual_variable_set = set( - [var.op.name for var in tf.trainable_variables()]) - if use_keras: - self.assertSetEqual(expected_keras_variables, actual_variable_set) - else: - self.assertSetEqual(expected_slim_variables, actual_variable_set) + [var.name.split(':')[0] for var in feature_map_generator.variables]) + self.assertSetEqual(expected_keras_variables, actual_variable_set) + else: + with g.as_default(): + actual_variable_set = set( + [var.op.name for var in tf.trainable_variables()]) + self.assertSetEqual(expected_slim_variables, actual_variable_set) -@parameterized.parameters({'use_native_resize_op': True, 'use_keras': False}, - {'use_native_resize_op': False, 'use_keras': False}, - {'use_native_resize_op': True, 'use_keras': True}, - {'use_native_resize_op': False, 'use_keras': True}) -class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase): +@parameterized.parameters({'use_native_resize_op': True}, + {'use_native_resize_op': False}) +class FPNFeatureMapGeneratorTest(test_case.TestCase, parameterized.TestCase): def _build_conv_hyperparams(self): conv_hyperparams = hyperparams_pb2.Hyperparams() @@ -425,10 +403,10 @@ class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase): return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams) def _build_feature_map_generator( - self, image_features, depth, use_keras, use_bounded_activations=False, + self, image_features, depth, use_bounded_activations=False, use_native_resize_op=False, use_explicit_padding=False, use_depthwise=False): - if use_keras: + if tf_version.is_tf2(): return feature_map_generators.KerasFpnTopDownFeatureMaps( num_levels=len(image_features), depth=depth, @@ -454,19 +432,20 @@ class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase): return feature_map_generator def test_get_expected_feature_map_shapes( - self, use_native_resize_op, use_keras): - image_features = [ - ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)), - ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)), - ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)), - ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32)) - ] - feature_map_generator = self._build_feature_map_generator( - image_features=image_features, - depth=128, - use_keras=use_keras, - use_native_resize_op=use_native_resize_op) - feature_maps = feature_map_generator(image_features) + self, use_native_resize_op): + with test_utils.GraphContextOrNone() as g: + image_features = [ + ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)), + ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)), + ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)), + ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32)) + ] + feature_map_generator = self._build_feature_map_generator( + image_features=image_features, + depth=128, + use_native_resize_op=use_native_resize_op) + def graph_fn(): + return feature_map_generator(image_features) expected_feature_map_shapes = { 'top_down_block2': (4, 8, 8, 128), @@ -474,30 +453,27 @@ class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase): 'top_down_block4': (4, 2, 2, 128), 'top_down_block5': (4, 1, 1, 128) } - - init_op = tf.global_variables_initializer() - with self.test_session() as sess: - 
sess.run(init_op) - out_feature_maps = sess.run(feature_maps) - out_feature_map_shapes = {key: value.shape - for key, value in out_feature_maps.items()} - self.assertDictEqual(out_feature_map_shapes, expected_feature_map_shapes) + out_feature_maps = self.execute(graph_fn, [], g) + out_feature_map_shapes = dict( + (key, value.shape) for key, value in out_feature_maps.items()) + self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes) def test_get_expected_feature_map_shapes_with_explicit_padding( - self, use_native_resize_op, use_keras): - image_features = [ - ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)), - ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)), - ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)), - ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32)) - ] - feature_map_generator = self._build_feature_map_generator( - image_features=image_features, - depth=128, - use_keras=use_keras, - use_explicit_padding=True, - use_native_resize_op=use_native_resize_op) - feature_maps = feature_map_generator(image_features) + self, use_native_resize_op): + with test_utils.GraphContextOrNone() as g: + image_features = [ + ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)), + ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)), + ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)), + ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32)) + ] + feature_map_generator = self._build_feature_map_generator( + image_features=image_features, + depth=128, + use_explicit_padding=True, + use_native_resize_op=use_native_resize_op) + def graph_fn(): + return feature_map_generator(image_features) expected_feature_map_shapes = { 'top_down_block2': (4, 8, 8, 128), @@ -505,19 +481,15 @@ class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase): 'top_down_block4': (4, 2, 2, 128), 'top_down_block5': (4, 1, 1, 128) } + out_feature_maps = self.execute(graph_fn, [], g) + out_feature_map_shapes = dict( + (key, value.shape) for key, value in out_feature_maps.items()) + self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes) - init_op = tf.global_variables_initializer() - with self.test_session() as sess: - sess.run(init_op) - out_feature_maps = sess.run(feature_maps) - out_feature_map_shapes = {key: value.shape - for key, value in out_feature_maps.items()} - self.assertDictEqual(out_feature_map_shapes, expected_feature_map_shapes) - + @unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') def test_use_bounded_activations_add_operations( - self, use_native_resize_op, use_keras): - tf_graph = tf.Graph() - with tf_graph.as_default(): + self, use_native_resize_op): + with test_utils.GraphContextOrNone() as g: image_features = [('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)), ('block3', @@ -529,34 +501,23 @@ class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase): feature_map_generator = self._build_feature_map_generator( image_features=image_features, depth=128, - use_keras=use_keras, use_bounded_activations=True, use_native_resize_op=use_native_resize_op) - feature_map_generator(image_features) - - if use_keras: - expected_added_operations = dict.fromkeys([ - 'FeatureMaps/top_down/clip_by_value/clip_by_value', - 'FeatureMaps/top_down/clip_by_value_1/clip_by_value', - 'FeatureMaps/top_down/clip_by_value_2/clip_by_value', - 'FeatureMaps/top_down/clip_by_value_3/clip_by_value', - 
'FeatureMaps/top_down/clip_by_value_4/clip_by_value', - 'FeatureMaps/top_down/clip_by_value_5/clip_by_value', - 'FeatureMaps/top_down/clip_by_value_6/clip_by_value', - ]) - else: - expected_added_operations = dict.fromkeys([ - 'top_down/clip_by_value', 'top_down/clip_by_value_1', - 'top_down/clip_by_value_2', 'top_down/clip_by_value_3', - 'top_down/clip_by_value_4', 'top_down/clip_by_value_5', - 'top_down/clip_by_value_6' - ]) - - op_names = {op.name: None for op in tf_graph.get_operations()} - self.assertDictContainsSubset(expected_added_operations, op_names) + def graph_fn(): + return feature_map_generator(image_features) + self.execute(graph_fn, [], g) + expected_added_operations = dict.fromkeys([ + 'top_down/clip_by_value', 'top_down/clip_by_value_1', + 'top_down/clip_by_value_2', 'top_down/clip_by_value_3', + 'top_down/clip_by_value_4', 'top_down/clip_by_value_5', + 'top_down/clip_by_value_6' + ]) + op_names = {op.name: None for op in g.get_operations()} + self.assertDictContainsSubset(expected_added_operations, op_names) + @unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') def test_use_bounded_activations_clip_value( - self, use_native_resize_op, use_keras): + self, use_native_resize_op): tf_graph = tf.Graph() with tf_graph.as_default(): image_features = [ @@ -568,28 +529,16 @@ class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase): feature_map_generator = self._build_feature_map_generator( image_features=image_features, depth=128, - use_keras=use_keras, use_bounded_activations=True, use_native_resize_op=use_native_resize_op) feature_map_generator(image_features) - if use_keras: - expected_clip_by_value_ops = dict.fromkeys([ - 'FeatureMaps/top_down/clip_by_value/clip_by_value', - 'FeatureMaps/top_down/clip_by_value_1/clip_by_value', - 'FeatureMaps/top_down/clip_by_value_2/clip_by_value', - 'FeatureMaps/top_down/clip_by_value_3/clip_by_value', - 'FeatureMaps/top_down/clip_by_value_4/clip_by_value', - 'FeatureMaps/top_down/clip_by_value_5/clip_by_value', - 'FeatureMaps/top_down/clip_by_value_6/clip_by_value', - ]) - else: - expected_clip_by_value_ops = [ - 'top_down/clip_by_value', 'top_down/clip_by_value_1', - 'top_down/clip_by_value_2', 'top_down/clip_by_value_3', - 'top_down/clip_by_value_4', 'top_down/clip_by_value_5', - 'top_down/clip_by_value_6' - ] + expected_clip_by_value_ops = [ + 'top_down/clip_by_value', 'top_down/clip_by_value_1', + 'top_down/clip_by_value_2', 'top_down/clip_by_value_3', + 'top_down/clip_by_value_4', 'top_down/clip_by_value_5', + 'top_down/clip_by_value_6' + ] # Gathers activation tensors before and after clip_by_value operations. 
activations = {} @@ -631,20 +580,21 @@ class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase): self.assertLessEqual(after_clipping_upper_bound, expected_upper_bound) def test_get_expected_feature_map_shapes_with_depthwise( - self, use_native_resize_op, use_keras): - image_features = [ - ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)), - ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)), - ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)), - ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32)) - ] - feature_map_generator = self._build_feature_map_generator( - image_features=image_features, - depth=128, - use_keras=use_keras, - use_depthwise=True, - use_native_resize_op=use_native_resize_op) - feature_maps = feature_map_generator(image_features) + self, use_native_resize_op): + with test_utils.GraphContextOrNone() as g: + image_features = [ + ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)), + ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)), + ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)), + ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32)) + ] + feature_map_generator = self._build_feature_map_generator( + image_features=image_features, + depth=128, + use_depthwise=True, + use_native_resize_op=use_native_resize_op) + def graph_fn(): + return feature_map_generator(image_features) expected_feature_map_shapes = { 'top_down_block2': (4, 8, 8, 128), @@ -652,30 +602,27 @@ class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase): 'top_down_block4': (4, 2, 2, 128), 'top_down_block5': (4, 1, 1, 128) } - - init_op = tf.global_variables_initializer() - with self.test_session() as sess: - sess.run(init_op) - out_feature_maps = sess.run(feature_maps) - out_feature_map_shapes = {key: value.shape - for key, value in out_feature_maps.items()} - self.assertDictEqual(out_feature_map_shapes, expected_feature_map_shapes) + out_feature_maps = self.execute(graph_fn, [], g) + out_feature_map_shapes = dict( + (key, value.shape) for key, value in out_feature_maps.items()) + self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes) def test_get_expected_variable_names( - self, use_native_resize_op, use_keras): - image_features = [ - ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)), - ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)), - ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)), - ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32)) - ] - feature_map_generator = self._build_feature_map_generator( - image_features=image_features, - depth=128, - use_keras=use_keras, - use_native_resize_op=use_native_resize_op) - feature_maps = feature_map_generator(image_features) - + self, use_native_resize_op): + with test_utils.GraphContextOrNone() as g: + image_features = [ + ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)), + ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)), + ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)), + ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32)) + ] + feature_map_generator = self._build_feature_map_generator( + image_features=image_features, + depth=128, + use_native_resize_op=use_native_resize_op) + def graph_fn(): + return feature_map_generator(image_features) + self.execute(graph_fn, [], g) expected_slim_variables = set([ 'projection_1/weights', 'projection_1/biases', @@ -709,33 +656,34 @@ class 
FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase): 'FeatureMaps/top_down/smoothing_3_conv/kernel', 'FeatureMaps/top_down/smoothing_3_conv/bias' ]) - init_op = tf.global_variables_initializer() - with self.test_session() as sess: - sess.run(init_op) - sess.run(feature_maps) + + if tf_version.is_tf2(): actual_variable_set = set( - [var.op.name for var in tf.trainable_variables()]) - if use_keras: - self.assertSetEqual(expected_keras_variables, actual_variable_set) - else: - self.assertSetEqual(expected_slim_variables, actual_variable_set) + [var.name.split(':')[0] for var in feature_map_generator.variables]) + self.assertSetEqual(expected_keras_variables, actual_variable_set) + else: + with g.as_default(): + actual_variable_set = set( + [var.op.name for var in tf.trainable_variables()]) + self.assertSetEqual(expected_slim_variables, actual_variable_set) def test_get_expected_variable_names_with_depthwise( - self, use_native_resize_op, use_keras): - image_features = [ - ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)), - ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)), - ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)), - ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32)) - ] - feature_map_generator = self._build_feature_map_generator( - image_features=image_features, - depth=128, - use_keras=use_keras, - use_depthwise=True, - use_native_resize_op=use_native_resize_op) - feature_maps = feature_map_generator(image_features) - + self, use_native_resize_op): + with test_utils.GraphContextOrNone() as g: + image_features = [ + ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)), + ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)), + ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)), + ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32)) + ] + feature_map_generator = self._build_feature_map_generator( + image_features=image_features, + depth=128, + use_depthwise=True, + use_native_resize_op=use_native_resize_op) + def graph_fn(): + return feature_map_generator(image_features) + self.execute(graph_fn, [], g) expected_slim_variables = set([ 'projection_1/weights', 'projection_1/biases', @@ -775,16 +723,16 @@ class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase): 'FeatureMaps/top_down/smoothing_3_depthwise_conv/pointwise_kernel', 'FeatureMaps/top_down/smoothing_3_depthwise_conv/bias' ]) - init_op = tf.global_variables_initializer() - with self.test_session() as sess: - sess.run(init_op) - sess.run(feature_maps) + + if tf_version.is_tf2(): actual_variable_set = set( - [var.op.name for var in tf.trainable_variables()]) - if use_keras: - self.assertSetEqual(expected_keras_variables, actual_variable_set) - else: - self.assertSetEqual(expected_slim_variables, actual_variable_set) + [var.name.split(':')[0] for var in feature_map_generator.variables]) + self.assertSetEqual(expected_keras_variables, actual_variable_set) + else: + with g.as_default(): + actual_variable_set = set( + [var.op.name for var in tf.trainable_variables()]) + self.assertSetEqual(expected_slim_variables, actual_variable_set) class GetDepthFunctionTest(tf.test.TestCase): @@ -804,6 +752,7 @@ class GetDepthFunctionTest(tf.test.TestCase): {'replace_pool_with_conv': False}, {'replace_pool_with_conv': True}, ) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class PoolingPyramidFeatureMapGeneratorTest(tf.test.TestCase): def test_get_expected_feature_map_shapes(self, 
replace_pool_with_conv): diff --git a/research/object_detection/models/keras_models/convert_keras_models.py b/research/object_detection/models/keras_models/convert_keras_models.py new file mode 100644 index 000000000..a34af981b --- /dev/null +++ b/research/object_detection/models/keras_models/convert_keras_models.py @@ -0,0 +1,85 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Write keras weights into a tensorflow checkpoint. + +The imagenet weights in `keras.applications` are downloaded from github. +This script converts them into the tensorflow checkpoint format and stores them +on disk where they can be easily accessible during training. +""" + +from __future__ import print_function + +import os + +from absl import app +import numpy as np +import tensorflow.compat.v1 as tf + +FLAGS = tf.flags.FLAGS + + +tf.flags.DEFINE_string('model', 'resnet_v2_101', + 'The model to load. The following are supported: ' + '"resnet_v1_50", "resnet_v1_101", "resnet_v2_50", ' + '"resnet_v2_101"') +tf.flags.DEFINE_string('output_path', None, + 'The directory to output weights in.') +tf.flags.DEFINE_boolean('verify_weights', True, + ('Verify the weights are loaded correctly by making ' + 'sure the predictions are the same before and after ' + 'saving.')) + + +def init_model(name): + """Creates a Keras Model with the specific ResNet version.""" + if name == 'resnet_v1_50': + model = tf.keras.applications.ResNet50(weights='imagenet') + elif name == 'resnet_v1_101': + model = tf.keras.applications.ResNet101(weights='imagenet') + elif name == 'resnet_v2_50': + model = tf.keras.applications.ResNet50V2(weights='imagenet') + elif name == 'resnet_v2_101': + model = tf.keras.applications.ResNet101V2(weights='imagenet') + else: + raise ValueError('Model {} not supported'.format(FLAGS.model)) + + return model + + +def main(_): + + model = init_model(FLAGS.model) + + path = os.path.join(FLAGS.output_path, FLAGS.model) + tf.gfile.MakeDirs(path) + weights_path = os.path.join(path, 'weights') + ckpt = tf.train.Checkpoint(feature_extractor=model) + saved_path = ckpt.save(weights_path) + + if FLAGS.verify_weights: + imgs = np.random.randn(1, 224, 224, 3).astype(np.float32) + keras_preds = model(imgs) + + model = init_model(FLAGS.model) + ckpt.restore(saved_path) + loaded_weights_pred = model(imgs).numpy() + + if not np.all(np.isclose(keras_preds, loaded_weights_pred)): + raise RuntimeError('The model was not saved correctly.') + + +if __name__ == '__main__': + tf.enable_v2_behavior() + app.run(main) diff --git a/research/object_detection/models/keras_models/hourglass_network_test.py b/research/object_detection/models/keras_models/hourglass_network_tf2_test.py similarity index 96% rename from research/object_detection/models/keras_models/hourglass_network_test.py rename to research/object_detection/models/keras_models/hourglass_network_tf2_test.py index 2e05eb992..d90b950ec 100644 
--- a/research/object_detection/models/keras_models/hourglass_network_test.py +++ b/research/object_detection/models/keras_models/hourglass_network_tf2_test.py @@ -13,14 +13,16 @@ # limitations under the License. # ============================================================================== """Testing the Hourglass network.""" - +import unittest from absl.testing import parameterized import numpy as np import tensorflow.compat.v1 as tf from object_detection.models.keras_models import hourglass_network as hourglass +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class HourglassFeatureExtractorTest(tf.test.TestCase, parameterized.TestCase): def test_identity_layer(self): @@ -95,5 +97,4 @@ class HourglassFeatureExtractorTest(tf.test.TestCase, parameterized.TestCase): if __name__ == '__main__': - tf.enable_v2_behavior() tf.test.main() diff --git a/research/object_detection/models/keras_models/inception_resnet_v2_test.py b/research/object_detection/models/keras_models/inception_resnet_v2_tf2_test.py similarity index 97% rename from research/object_detection/models/keras_models/inception_resnet_v2_test.py rename to research/object_detection/models/keras_models/inception_resnet_v2_tf2_test.py index 5706e679c..4cbcc54ad 100644 --- a/research/object_detection/models/keras_models/inception_resnet_v2_test.py +++ b/research/object_detection/models/keras_models/inception_resnet_v2_tf2_test.py @@ -30,13 +30,14 @@ consistent. from __future__ import absolute_import from __future__ import division from __future__ import print_function - +import unittest import numpy as np from six.moves import zip import tensorflow.compat.v1 as tf from object_detection.models.keras_models import inception_resnet_v2 from object_detection.utils import test_case +from object_detection.utils import tf_version _KERAS_TO_SLIM_ENDPOINT_NAMES = { 'activation': 'Conv2d_1a_3x3', @@ -100,6 +101,7 @@ _NUM_CHANNELS = 3 _BATCH_SIZE = 2 +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class InceptionResnetV2Test(test_case.TestCase): def _create_application_with_layer_outputs( @@ -166,8 +168,7 @@ class InceptionResnetV2Test(test_case.TestCase): model = self._create_application_with_layer_outputs( layer_names=layer_names, batchnorm_training=False) - preprocessed_inputs = tf.placeholder( - tf.float32, (4, None, None, _NUM_CHANNELS)) + preprocessed_inputs = tf.random.uniform([4, 40, 40, _NUM_CHANNELS]) model(preprocessed_inputs) return model.variables diff --git a/research/object_detection/models/keras_models/mobilenet_v1_test.py b/research/object_detection/models/keras_models/mobilenet_v1_tf2_test.py similarity index 85% rename from research/object_detection/models/keras_models/mobilenet_v1_test.py rename to research/object_detection/models/keras_models/mobilenet_v1_tf2_test.py index 72cc1f144..7e46999d9 100644 --- a/research/object_detection/models/keras_models/mobilenet_v1_test.py +++ b/research/object_detection/models/keras_models/mobilenet_v1_tf2_test.py @@ -29,7 +29,7 @@ consistent. 
from __future__ import absolute_import from __future__ import division from __future__ import print_function - +import unittest import numpy as np from six.moves import zip import tensorflow.compat.v1 as tf @@ -42,6 +42,7 @@ from object_detection.models.keras_models import model_utils from object_detection.models.keras_models import test_utils from object_detection.protos import hyperparams_pb2 from object_detection.utils import test_case +from object_detection.utils import tf_version _KERAS_LAYERS_TO_CHECK = [ 'conv1_relu', @@ -64,6 +65,7 @@ _NUM_CHANNELS = 3 _BATCH_SIZE = 2 +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class MobilenetV1Test(test_case.TestCase): def _build_conv_hyperparams(self): @@ -118,19 +120,17 @@ class MobilenetV1Test(test_case.TestCase): self, image_height, image_width, depth_multiplier, expected_feature_map_shape, use_explicit_padding=False, min_depth=8, layer_names=None, conv_defs=None): - def graph_fn(image_tensor): - model = self._create_application_with_layer_outputs( - layer_names=layer_names, - batchnorm_training=False, - use_explicit_padding=use_explicit_padding, - min_depth=min_depth, - alpha=depth_multiplier, - conv_defs=conv_defs) - return model(image_tensor) + model = self._create_application_with_layer_outputs( + layer_names=layer_names, + batchnorm_training=False, + use_explicit_padding=use_explicit_padding, + min_depth=min_depth, + alpha=depth_multiplier, + conv_defs=conv_defs) image_tensor = np.random.rand(_BATCH_SIZE, image_height, image_width, _NUM_CHANNELS).astype(np.float32) - feature_maps = self.execute(graph_fn, [image_tensor]) + feature_maps = model(image_tensor) for feature_map, expected_shape in zip(feature_maps, expected_feature_map_shape): @@ -140,36 +140,29 @@ class MobilenetV1Test(test_case.TestCase): self, image_height, image_width, depth_multiplier, expected_feature_map_shape, use_explicit_padding=False, min_depth=8, layer_names=None): - def graph_fn(image_height, image_width): - image_tensor = tf.random_uniform([_BATCH_SIZE, image_height, image_width, - _NUM_CHANNELS], dtype=tf.float32) - model = self._create_application_with_layer_outputs( - layer_names=layer_names, - batchnorm_training=False, - use_explicit_padding=use_explicit_padding, - alpha=depth_multiplier) - return model(image_tensor) + image_tensor = tf.random_uniform([_BATCH_SIZE, image_height, image_width, + _NUM_CHANNELS], dtype=tf.float32) + model = self._create_application_with_layer_outputs( + layer_names=layer_names, + batchnorm_training=False, + use_explicit_padding=use_explicit_padding, + alpha=depth_multiplier) - feature_maps = self.execute_cpu(graph_fn, [ - np.array(image_height, dtype=np.int32), - np.array(image_width, dtype=np.int32) - ]) + feature_maps = model(image_tensor) for feature_map, expected_shape in zip(feature_maps, expected_feature_map_shape): self.assertAllEqual(feature_map.shape, expected_shape) def _get_variables(self, depth_multiplier, layer_names=None): - g = tf.Graph() - with g.as_default(): - preprocessed_inputs = tf.placeholder( - tf.float32, (4, None, None, _NUM_CHANNELS)) - model = self._create_application_with_layer_outputs( - layer_names=layer_names, - batchnorm_training=False, use_explicit_padding=False, - alpha=depth_multiplier) - model(preprocessed_inputs) - return g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES) + tf.keras.backend.clear_session() + model = self._create_application_with_layer_outputs( + layer_names=layer_names, + batchnorm_training=False, use_explicit_padding=False, + alpha=depth_multiplier) + 
preprocessed_inputs = tf.random.uniform([2, 40, 40, 3]) + model(preprocessed_inputs) + return model.variables def test_returns_correct_shapes_128(self): image_height = 128 diff --git a/research/object_detection/models/keras_models/mobilenet_v2_test.py b/research/object_detection/models/keras_models/mobilenet_v2_tf2_test.py similarity index 84% rename from research/object_detection/models/keras_models/mobilenet_v2_test.py rename to research/object_detection/models/keras_models/mobilenet_v2_tf2_test.py index cfdd11978..2a53a9b63 100644 --- a/research/object_detection/models/keras_models/mobilenet_v2_test.py +++ b/research/object_detection/models/keras_models/mobilenet_v2_tf2_test.py @@ -18,7 +18,7 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function - +import unittest import numpy as np from six.moves import zip import tensorflow.compat.v1 as tf @@ -31,6 +31,7 @@ from object_detection.models.keras_models import model_utils from object_detection.models.keras_models import test_utils from object_detection.protos import hyperparams_pb2 from object_detection.utils import test_case +from object_detection.utils import tf_version _layers_to_check = [ 'Conv1_relu', @@ -53,6 +54,7 @@ _layers_to_check = [ 'out_relu'] +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class MobilenetV2Test(test_case.TestCase): def _build_conv_hyperparams(self): @@ -86,6 +88,8 @@ class MobilenetV2Test(test_case.TestCase): min_depth=None, conv_defs=None): """Constructs Keras mobilenetv2 that extracts intermediate layer outputs.""" + # Have to clear the Keras backend to ensure isolation in layer naming + tf.keras.backend.clear_session() if not layer_names: layer_names = _layers_to_check full_model = mobilenet_v2.mobilenet_v2( @@ -107,19 +111,17 @@ class MobilenetV2Test(test_case.TestCase): self, batch_size, image_height, image_width, depth_multiplier, expected_feature_map_shapes, use_explicit_padding=False, min_depth=None, layer_names=None, conv_defs=None): - def graph_fn(image_tensor): - model = self._create_application_with_layer_outputs( - layer_names=layer_names, - batchnorm_training=False, - use_explicit_padding=use_explicit_padding, - min_depth=min_depth, - alpha=depth_multiplier, - conv_defs=conv_defs) - return model(image_tensor) + model = self._create_application_with_layer_outputs( + layer_names=layer_names, + batchnorm_training=False, + use_explicit_padding=use_explicit_padding, + min_depth=min_depth, + alpha=depth_multiplier, + conv_defs=conv_defs) image_tensor = np.random.rand(batch_size, image_height, image_width, 3).astype(np.float32) - feature_maps = self.execute(graph_fn, [image_tensor]) + feature_maps = model([image_tensor]) for feature_map, expected_shape in zip(feature_maps, expected_feature_map_shapes): @@ -129,34 +131,30 @@ class MobilenetV2Test(test_case.TestCase): self, batch_size, image_height, image_width, depth_multiplier, expected_feature_map_shapes, use_explicit_padding=False, layer_names=None): - def graph_fn(image_height, image_width): - image_tensor = tf.random_uniform([batch_size, image_height, image_width, - 3], dtype=tf.float32) - model = self._create_application_with_layer_outputs( - layer_names=layer_names, - batchnorm_training=False, use_explicit_padding=use_explicit_padding, - alpha=depth_multiplier) - return model(image_tensor) - - feature_maps = self.execute_cpu(graph_fn, [ - np.array(image_height, dtype=np.int32), - np.array(image_width, dtype=np.int32) - ]) - + height = tf.random.uniform([], 
minval=image_height, maxval=image_height+1, + dtype=tf.int32) + width = tf.random.uniform([], minval=image_width, maxval=image_width+1, + dtype=tf.int32) + image_tensor = tf.random.uniform([batch_size, height, width, + 3], dtype=tf.float32) + model = self._create_application_with_layer_outputs( + layer_names=layer_names, + batchnorm_training=False, use_explicit_padding=use_explicit_padding, + alpha=depth_multiplier) + feature_maps = model(image_tensor) for feature_map, expected_shape in zip(feature_maps, expected_feature_map_shapes): self.assertAllEqual(feature_map.shape, expected_shape) def _get_variables(self, depth_multiplier, layer_names=None): - g = tf.Graph() - with g.as_default(): - preprocessed_inputs = tf.placeholder(tf.float32, (4, None, None, 3)) - model = self._create_application_with_layer_outputs( - layer_names=layer_names, - batchnorm_training=False, use_explicit_padding=False, - alpha=depth_multiplier) - model(preprocessed_inputs) - return g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES) + tf.keras.backend.clear_session() + model = self._create_application_with_layer_outputs( + layer_names=layer_names, + batchnorm_training=False, use_explicit_padding=False, + alpha=depth_multiplier) + preprocessed_inputs = tf.random.uniform([2, 40, 40, 3]) + model(preprocessed_inputs) + return model.variables def test_returns_correct_shapes_128(self): image_height = 128 diff --git a/research/object_detection/models/keras_models/resnet_v1_test.py b/research/object_detection/models/keras_models/resnet_v1_tf2_test.py similarity index 97% rename from research/object_detection/models/keras_models/resnet_v1_test.py rename to research/object_detection/models/keras_models/resnet_v1_tf2_test.py index 7b0c2a8e0..71cc5f22b 100644 --- a/research/object_detection/models/keras_models/resnet_v1_test.py +++ b/research/object_detection/models/keras_models/resnet_v1_tf2_test.py @@ -19,7 +19,7 @@ object detection. To verify the consistency of the two models, we compare: 1. Output shape of each layer given different inputs. 2. Number of global variables. 
""" - +import unittest import numpy as np from six.moves import zip import tensorflow.compat.v1 as tf @@ -30,6 +30,7 @@ from object_detection.builders import hyperparams_builder from object_detection.models.keras_models import resnet_v1 from object_detection.protos import hyperparams_pb2 from object_detection.utils import test_case +from object_detection.utils import tf_version _EXPECTED_SHAPES_224_RESNET50 = { 'conv2_block3_out': (4, 56, 56, 256), @@ -65,6 +66,7 @@ _NUM_CHANNELS = 3 _BATCH_SIZE = 4 +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class ResnetV1Test(test_case.TestCase): def _build_conv_hyperparams(self): @@ -146,8 +148,7 @@ class ResnetV1Test(test_case.TestCase): tf.keras.backend.clear_session() model = self._create_application_with_layer_outputs( model_index, batchnorm_training=False) - preprocessed_inputs = tf.placeholder(tf.float32, - (4, None, None, _NUM_CHANNELS)) + preprocessed_inputs = tf.random.uniform([2, 40, 40, _NUM_CHANNELS]) model(preprocessed_inputs) return model.variables diff --git a/research/object_detection/models/ssd_feature_extractor_test.py b/research/object_detection/models/ssd_feature_extractor_test.py index 913a9f6a5..29c43e376 100644 --- a/research/object_detection/models/ssd_feature_extractor_test.py +++ b/research/object_detection/models/ssd_feature_extractor_test.py @@ -31,6 +31,7 @@ from google.protobuf import text_format from object_detection.builders import hyperparams_builder from object_detection.protos import hyperparams_pb2 from object_detection.utils import test_case +from object_detection.utils import test_utils class SsdFeatureExtractorTestBase(test_case.TestCase): @@ -89,14 +90,13 @@ class SsdFeatureExtractorTestBase(test_case.TestCase): """ pass - def _extract_features(self, - image_tensor, - depth_multiplier, - pad_to_multiple, - use_explicit_padding=False, - use_depthwise=False, - num_layers=6, - use_keras=False): + def _create_features(self, + depth_multiplier, + pad_to_multiple, + use_explicit_padding=False, + use_depthwise=False, + num_layers=6, + use_keras=False): kwargs = {} if use_explicit_padding: kwargs.update({'use_explicit_padding': use_explicit_padding}) @@ -110,6 +110,12 @@ class SsdFeatureExtractorTestBase(test_case.TestCase): depth_multiplier, pad_to_multiple, **kwargs) + return feature_extractor + + def _extract_features(self, + image_tensor, + feature_extractor, + use_keras=False): if use_keras: feature_maps = feature_extractor(image_tensor) else: @@ -127,10 +133,8 @@ class SsdFeatureExtractorTestBase(test_case.TestCase): num_layers=6, use_keras=False, use_depthwise=False): - - def graph_fn(image_tensor): - return self._extract_features( - image_tensor, + with test_utils.GraphContextOrNone() as g: + feature_extractor = self._create_features( depth_multiplier, pad_to_multiple, use_explicit_padding=use_explicit_padding, @@ -138,9 +142,15 @@ class SsdFeatureExtractorTestBase(test_case.TestCase): use_keras=use_keras, use_depthwise=use_depthwise) + def graph_fn(image_tensor): + return self._extract_features( + image_tensor, + feature_extractor, + use_keras=use_keras) + image_tensor = np.random.rand(batch_size, image_height, image_width, 3).astype(np.float32) - feature_maps = self.execute(graph_fn, [image_tensor]) + feature_maps = self.execute(graph_fn, [image_tensor], graph=g) for feature_map, expected_shape in zip( feature_maps, expected_feature_map_shapes): self.assertAllEqual(feature_map.shape, expected_shape) @@ -158,11 +168,8 @@ class SsdFeatureExtractorTestBase(test_case.TestCase): 
use_keras=False, use_depthwise=False): - def graph_fn(image_height, image_width): - image_tensor = tf.random_uniform([batch_size, image_height, image_width, - 3], dtype=tf.float32) - return self._extract_features( - image_tensor, + with test_utils.GraphContextOrNone() as g: + feature_extractor = self._create_features( depth_multiplier, pad_to_multiple, use_explicit_padding=use_explicit_padding, @@ -170,10 +177,18 @@ class SsdFeatureExtractorTestBase(test_case.TestCase): use_keras=use_keras, use_depthwise=use_depthwise) + def graph_fn(image_height, image_width): + image_tensor = tf.random_uniform([batch_size, image_height, image_width, + 3], dtype=tf.float32) + return self._extract_features( + image_tensor, + feature_extractor, + use_keras=use_keras) + feature_maps = self.execute_cpu(graph_fn, [ np.array(image_height, dtype=np.int32), np.array(image_width, dtype=np.int32) - ]) + ], graph=g) for feature_map, expected_shape in zip( feature_maps, expected_feature_map_shapes): self.assertAllEqual(feature_map.shape, expected_shape) @@ -186,19 +201,33 @@ class SsdFeatureExtractorTestBase(test_case.TestCase): pad_to_multiple, use_keras=False, use_depthwise=False): - preprocessed_inputs = tf.placeholder(tf.float32, (4, None, None, 3)) - feature_maps = self._extract_features( - preprocessed_inputs, - depth_multiplier, - pad_to_multiple, - use_keras=use_keras, - use_depthwise=use_depthwise) - test_preprocessed_image = np.random.rand(4, image_height, image_width, 3) - with self.test_session() as sess: - sess.run(tf.global_variables_initializer()) + + with test_utils.GraphContextOrNone() as g: + batch = 4 + width = tf.random.uniform([], minval=image_width, maxval=image_width+1, + dtype=tf.int32) + height = tf.random.uniform([], minval=image_height, maxval=image_height+1, + dtype=tf.int32) + shape = tf.stack([batch, height, width, 3]) + preprocessed_inputs = tf.random.uniform(shape) + feature_extractor = self._create_features( + depth_multiplier, + pad_to_multiple, + use_keras=use_keras, + use_depthwise=use_depthwise) + + def graph_fn(): + feature_maps = self._extract_features( + preprocessed_inputs, + feature_extractor, + use_keras=use_keras) + return feature_maps + if self.is_tf2(): + with self.assertRaises(ValueError): + self.execute_cpu(graph_fn, [], graph=g) + else: with self.assertRaises(tf.errors.InvalidArgumentError): - sess.run(feature_maps, - feed_dict={preprocessed_inputs: test_preprocessed_image}) + self.execute_cpu(graph_fn, [], graph=g) def check_feature_extractor_variables_under_scope(self, depth_multiplier, @@ -221,11 +250,14 @@ class SsdFeatureExtractorTestBase(test_case.TestCase): use_depthwise=False): g = tf.Graph() with g.as_default(): - preprocessed_inputs = tf.placeholder(tf.float32, (4, None, None, 3)) - self._extract_features( - preprocessed_inputs, + feature_extractor = self._create_features( depth_multiplier, pad_to_multiple, use_keras=use_keras, use_depthwise=use_depthwise) + preprocessed_inputs = tf.placeholder(tf.float32, (4, None, None, 3)) + self._extract_features( + preprocessed_inputs, + feature_extractor, + use_keras=use_keras) return g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES) diff --git a/research/object_detection/models/ssd_inception_v2_feature_extractor_test.py b/research/object_detection/models/ssd_inception_v2_feature_extractor_tf1_test.py similarity index 98% rename from research/object_detection/models/ssd_inception_v2_feature_extractor_test.py rename to research/object_detection/models/ssd_inception_v2_feature_extractor_tf1_test.py index 
34921609c..1e33ed70e 100644 --- a/research/object_detection/models/ssd_inception_v2_feature_extractor_test.py +++ b/research/object_detection/models/ssd_inception_v2_feature_extractor_tf1_test.py @@ -14,13 +14,16 @@ # ============================================================================== """Tests for object_detection.models.ssd_inception_v2_feature_extractor.""" +import unittest import numpy as np import tensorflow.compat.v1 as tf from object_detection.models import ssd_feature_extractor_test from object_detection.models import ssd_inception_v2_feature_extractor +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class SsdInceptionV2FeatureExtractorTest( ssd_feature_extractor_test.SsdFeatureExtractorTestBase): diff --git a/research/object_detection/models/ssd_inception_v3_feature_extractor_test.py b/research/object_detection/models/ssd_inception_v3_feature_extractor_tf1_test.py similarity index 98% rename from research/object_detection/models/ssd_inception_v3_feature_extractor_test.py rename to research/object_detection/models/ssd_inception_v3_feature_extractor_tf1_test.py index 1e706c1e8..a0cbb4515 100644 --- a/research/object_detection/models/ssd_inception_v3_feature_extractor_test.py +++ b/research/object_detection/models/ssd_inception_v3_feature_extractor_tf1_test.py @@ -14,13 +14,16 @@ # ============================================================================== """Tests for object_detection.models.ssd_inception_v3_feature_extractor.""" +import unittest import numpy as np import tensorflow.compat.v1 as tf from object_detection.models import ssd_feature_extractor_test from object_detection.models import ssd_inception_v3_feature_extractor +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class SsdInceptionV3FeatureExtractorTest( ssd_feature_extractor_test.SsdFeatureExtractorTestBase): diff --git a/research/object_detection/models/ssd_mobiledet_feature_extractor.py b/research/object_detection/models/ssd_mobiledet_feature_extractor.py index 33d7e053b..019d7543b 100644 --- a/research/object_detection/models/ssd_mobiledet_feature_extractor.py +++ b/research/object_detection/models/ssd_mobiledet_feature_extractor.py @@ -290,6 +290,72 @@ def mobiledet_edgetpu_backbone(h, multiplier=1.0): return endpoints +def mobiledet_gpu_backbone(h, multiplier=1.0): + """Build a MobileDet GPU backbone.""" + + def _scale(filters): + return _scale_filters(filters, multiplier) + + ibn = functools.partial(_inverted_bottleneck, activation_fn=tf.nn.relu6) + fused = functools.partial(_fused_conv, activation_fn=tf.nn.relu6) + tucker = functools.partial(_tucker_conv, activation_fn=tf.nn.relu6) + + endpoints = {} + # block 0 + h = _conv(h, _scale(32), 3, strides=2, activation_fn=tf.nn.relu6) + + # block 1 + h = tucker( + h, + _scale(16), + input_rank_ratio=0.25, + output_rank_ratio=0.25, + residual=False) + endpoints['C1'] = h + + # block 2 + h = fused(h, _scale(32), expansion=8, strides=2, residual=False) + h = tucker(h, _scale(32), input_rank_ratio=0.25, output_rank_ratio=0.25) + h = tucker(h, _scale(32), input_rank_ratio=0.25, output_rank_ratio=0.25) + h = tucker(h, _scale(32), input_rank_ratio=0.25, output_rank_ratio=0.25) + endpoints['C2'] = h + + # block 3 + h = fused( + h, _scale(64), expansion=8, kernel_size=3, strides=2, residual=False) + h = fused(h, _scale(64), expansion=8) + h = fused(h, _scale(64), expansion=8) + h = fused(h, _scale(64), expansion=4) + endpoints['C3'] 
= h + + # block 4 + h = fused( + h, _scale(128), expansion=8, kernel_size=3, strides=2, residual=False) + h = fused(h, _scale(128), expansion=4) + h = fused(h, _scale(128), expansion=4) + h = fused(h, _scale(128), expansion=4) + + # block 5 + h = fused( + h, _scale(128), expansion=8, kernel_size=3, strides=1, residual=False) + h = fused(h, _scale(128), expansion=8) + h = fused(h, _scale(128), expansion=8) + h = fused(h, _scale(128), expansion=8) + endpoints['C4'] = h + + # block 6 + h = fused( + h, _scale(128), expansion=4, kernel_size=3, strides=2, residual=False) + h = fused(h, _scale(128), expansion=4) + h = fused(h, _scale(128), expansion=4) + h = fused(h, _scale(128), expansion=4) + + # block 7 + h = ibn(h, _scale(384), expansion=8, kernel_size=3, strides=1, residual=False) + endpoints['C5'] = h + return endpoints + + class SSDMobileDetFeatureExtractorBase(ssd_meta_arch.SSDFeatureExtractor): """Base class of SSD feature extractor using MobileDet features.""" @@ -490,3 +556,31 @@ class SSDMobileDetEdgeTPUFeatureExtractor(SSDMobileDetFeatureExtractorBase): use_depthwise=use_depthwise, override_base_feature_extractor_hyperparams=override_base_feature_extractor_hyperparams, scope_name=scope_name) + + +class SSDMobileDetGPUFeatureExtractor(SSDMobileDetFeatureExtractorBase): + """MobileDet-GPU feature extractor.""" + + def __init__(self, + is_training, + depth_multiplier, + min_depth, + pad_to_multiple, + conv_hyperparams_fn, + reuse_weights=None, + use_explicit_padding=False, + use_depthwise=False, + override_base_feature_extractor_hyperparams=False, + scope_name='MobileDetGPU'): + super(SSDMobileDetGPUFeatureExtractor, self).__init__( + backbone_fn=mobiledet_gpu_backbone, + is_training=is_training, + depth_multiplier=depth_multiplier, + min_depth=min_depth, + pad_to_multiple=pad_to_multiple, + conv_hyperparams_fn=conv_hyperparams_fn, + reuse_weights=reuse_weights, + use_explicit_padding=use_explicit_padding, + use_depthwise=use_depthwise, + override_base_feature_extractor_hyperparams=override_base_feature_extractor_hyperparams, + scope_name=scope_name) diff --git a/research/object_detection/models/ssd_mobiledet_feature_extractor_test.py b/research/object_detection/models/ssd_mobiledet_feature_extractor_tf1_test.py similarity index 86% rename from research/object_detection/models/ssd_mobiledet_feature_extractor_test.py rename to research/object_detection/models/ssd_mobiledet_feature_extractor_tf1_test.py index c2c1ef692..2af37554b 100644 --- a/research/object_detection/models/ssd_mobiledet_feature_extractor_test.py +++ b/research/object_detection/models/ssd_mobiledet_feature_extractor_tf1_test.py @@ -13,14 +13,20 @@ # limitations under the License. 
# ============================================================================== """Tests for ssd_mobiledet_feature_extractor.""" - +import unittest import tensorflow.compat.v1 as tf -from tensorflow.contrib import quantize as contrib_quantize from object_detection.models import ssd_feature_extractor_test from object_detection.models import ssd_mobiledet_feature_extractor +from object_detection.utils import tf_version + +try: + from tensorflow.contrib import quantize as contrib_quantize # pylint: disable=g-import-not-at-top +except: # pylint: disable=bare-except + pass +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class SSDMobileDetFeatureExtractorTest( ssd_feature_extractor_test.SsdFeatureExtractorTestBase): @@ -105,6 +111,19 @@ class SSDMobileDetFeatureExtractorTest( for expected_shape, x in zip(expected_feature_map_shapes, feature_maps): self.assertTrue(x.shape.is_compatible_with(expected_shape)) + def test_mobiledet_gpu_returns_correct_shapes(self): + expected_feature_map_shapes = [(2, 40, 20, 128), (2, 20, 10, 384), + (2, 10, 5, 512), (2, 5, 3, 256), + (2, 3, 2, 256), (2, 2, 1, 128)] + feature_extractor = self._create_feature_extractor( + ssd_mobiledet_feature_extractor.SSDMobileDetGPUFeatureExtractor) + image = tf.random.normal((2, 640, 320, 3)) + feature_maps = feature_extractor.extract_features(image) + + self.assertEqual(len(expected_feature_map_shapes), len(feature_maps)) + for expected_shape, x in zip(expected_feature_map_shapes, feature_maps): + self.assertTrue(x.shape.is_compatible_with(expected_shape)) + def _check_quantization(self, model_fn): checkpoint_dir = self.get_temp_dir() diff --git a/research/object_detection/models/ssd_mobilenet_edgetpu_feature_extractor_test.py b/research/object_detection/models/ssd_mobilenet_edgetpu_feature_extractor_tf1_test.py similarity index 94% rename from research/object_detection/models/ssd_mobilenet_edgetpu_feature_extractor_test.py rename to research/object_detection/models/ssd_mobilenet_edgetpu_feature_extractor_tf1_test.py index 186122028..841fe5a14 100644 --- a/research/object_detection/models/ssd_mobilenet_edgetpu_feature_extractor_test.py +++ b/research/object_detection/models/ssd_mobilenet_edgetpu_feature_extractor_tf1_test.py @@ -13,13 +13,15 @@ # limitations under the License. 
# ============================================================================== """Tests for ssd_mobilenet_edgetpu_feature_extractor.""" - +import unittest import tensorflow.compat.v1 as tf from object_detection.models import ssd_mobilenet_edgetpu_feature_extractor from object_detection.models import ssd_mobilenet_edgetpu_feature_extractor_testbase +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class SsdMobilenetEdgeTPUFeatureExtractorTest( ssd_mobilenet_edgetpu_feature_extractor_testbase ._SsdMobilenetEdgeTPUFeatureExtractorTestBase): diff --git a/research/object_detection/models/ssd_mobilenet_v1_feature_extractor_test.py b/research/object_detection/models/ssd_mobilenet_v1_feature_extractor_tf1_test.py similarity index 77% rename from research/object_detection/models/ssd_mobilenet_v1_feature_extractor_test.py rename to research/object_detection/models/ssd_mobilenet_v1_feature_extractor_tf1_test.py index eaf8776af..2f1d48396 100644 --- a/research/object_detection/models/ssd_mobilenet_v1_feature_extractor_test.py +++ b/research/object_detection/models/ssd_mobilenet_v1_feature_extractor_tf1_test.py @@ -17,20 +17,16 @@ By using parameterized test decorator, this test serves for both Slim-based and Keras-based Mobilenet V1 feature extractors in SSD. """ -from absl.testing import parameterized - +import unittest import numpy as np import tensorflow.compat.v1 as tf from object_detection.models import ssd_feature_extractor_test from object_detection.models import ssd_mobilenet_v1_feature_extractor -from object_detection.models import ssd_mobilenet_v1_keras_feature_extractor +from object_detection.utils import tf_version -@parameterized.parameters( - {'use_keras': False}, - {'use_keras': True}, -) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class SsdMobilenetV1FeatureExtractorTest( ssd_feature_extractor_test.SsdFeatureExtractorTestBase): @@ -59,31 +55,17 @@ class SsdMobilenetV1FeatureExtractorTest( an ssd_meta_arch.SSDFeatureExtractor object. 
""" min_depth = 32 - if use_keras: - return (ssd_mobilenet_v1_keras_feature_extractor - .SSDMobileNetV1KerasFeatureExtractor( - is_training=is_training, - depth_multiplier=depth_multiplier, - min_depth=min_depth, - pad_to_multiple=pad_to_multiple, - conv_hyperparams=self._build_conv_hyperparams( - add_batch_norm=False), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - use_explicit_padding=use_explicit_padding, - num_layers=num_layers, - name='MobilenetV1')) - else: - return ssd_mobilenet_v1_feature_extractor.SSDMobileNetV1FeatureExtractor( - is_training, - depth_multiplier, - min_depth, - pad_to_multiple, - self.conv_hyperparams_fn, - use_explicit_padding=use_explicit_padding, - num_layers=num_layers) + del use_keras + return ssd_mobilenet_v1_feature_extractor.SSDMobileNetV1FeatureExtractor( + is_training, + depth_multiplier, + min_depth, + pad_to_multiple, + self.conv_hyperparams_fn, + use_explicit_padding=use_explicit_padding, + num_layers=num_layers) - def test_extract_features_returns_correct_shapes_128(self, use_keras): + def test_extract_features_returns_correct_shapes_128(self): image_height = 128 image_width = 128 depth_multiplier = 1.0 @@ -99,7 +81,7 @@ class SsdMobilenetV1FeatureExtractorTest( pad_to_multiple, expected_feature_map_shape, use_explicit_padding=False, - use_keras=use_keras) + use_keras=False) self.check_extract_features_returns_correct_shape( 2, image_height, @@ -108,9 +90,9 @@ class SsdMobilenetV1FeatureExtractorTest( pad_to_multiple, expected_feature_map_shape, use_explicit_padding=True, - use_keras=use_keras) + use_keras=False) - def test_extract_features_returns_correct_shapes_299(self, use_keras): + def test_extract_features_returns_correct_shapes_299(self): image_height = 299 image_width = 299 depth_multiplier = 1.0 @@ -126,7 +108,7 @@ class SsdMobilenetV1FeatureExtractorTest( pad_to_multiple, expected_feature_map_shape, use_explicit_padding=False, - use_keras=use_keras) + use_keras=False) self.check_extract_features_returns_correct_shape( 2, image_height, @@ -135,9 +117,9 @@ class SsdMobilenetV1FeatureExtractorTest( pad_to_multiple, expected_feature_map_shape, use_explicit_padding=True, - use_keras=use_keras) + use_keras=False) - def test_extract_features_with_dynamic_image_shape(self, use_keras): + def test_extract_features_with_dynamic_image_shape(self): image_height = 128 image_width = 128 depth_multiplier = 1.0 @@ -153,7 +135,7 @@ class SsdMobilenetV1FeatureExtractorTest( pad_to_multiple, expected_feature_map_shape, use_explicit_padding=False, - use_keras=use_keras) + use_keras=False) self.check_extract_features_returns_correct_shape( 2, image_height, @@ -162,10 +144,10 @@ class SsdMobilenetV1FeatureExtractorTest( pad_to_multiple, expected_feature_map_shape, use_explicit_padding=True, - use_keras=use_keras) + use_keras=False) def test_extract_features_returns_correct_shapes_enforcing_min_depth( - self, use_keras): + self): image_height = 299 image_width = 299 depth_multiplier = 0.5**12 @@ -181,7 +163,7 @@ class SsdMobilenetV1FeatureExtractorTest( pad_to_multiple, expected_feature_map_shape, use_explicit_padding=False, - use_keras=use_keras) + use_keras=False) self.check_extract_features_returns_correct_shape( 2, image_height, @@ -190,10 +172,10 @@ class SsdMobilenetV1FeatureExtractorTest( pad_to_multiple, expected_feature_map_shape, use_explicit_padding=True, - use_keras=use_keras) + use_keras=False) def test_extract_features_returns_correct_shapes_with_pad_to_multiple( - self, use_keras): + self): image_height = 299 image_width = 299 
depth_multiplier = 1.0 @@ -209,7 +191,7 @@ class SsdMobilenetV1FeatureExtractorTest( pad_to_multiple, expected_feature_map_shape, use_explicit_padding=False, - use_keras=use_keras) + use_keras=False) self.check_extract_features_returns_correct_shape( 2, image_height, @@ -218,10 +200,10 @@ class SsdMobilenetV1FeatureExtractorTest( pad_to_multiple, expected_feature_map_shape, use_explicit_padding=True, - use_keras=use_keras) + use_keras=False) def test_extract_features_raises_error_with_invalid_image_size( - self, use_keras): + self): image_height = 32 image_width = 32 depth_multiplier = 1.0 @@ -231,34 +213,34 @@ class SsdMobilenetV1FeatureExtractorTest( image_width, depth_multiplier, pad_to_multiple, - use_keras=use_keras) + use_keras=False) - def test_preprocess_returns_correct_value_range(self, use_keras): + def test_preprocess_returns_correct_value_range(self): image_height = 128 image_width = 128 depth_multiplier = 1 pad_to_multiple = 1 test_image = np.random.rand(2, image_height, image_width, 3) feature_extractor = self._create_feature_extractor( - depth_multiplier, pad_to_multiple, use_keras=use_keras) + depth_multiplier, pad_to_multiple, use_keras=False) preprocessed_image = feature_extractor.preprocess(test_image) self.assertTrue(np.all(np.less_equal(np.abs(preprocessed_image), 1.0))) - def test_variables_only_created_in_scope(self, use_keras): + def test_variables_only_created_in_scope(self): depth_multiplier = 1 pad_to_multiple = 1 scope_name = 'MobilenetV1' self.check_feature_extractor_variables_under_scope( - depth_multiplier, pad_to_multiple, scope_name, use_keras=use_keras) + depth_multiplier, pad_to_multiple, scope_name, use_keras=False) - def test_variable_count(self, use_keras): + def test_variable_count(self): depth_multiplier = 1 pad_to_multiple = 1 variables = self.get_feature_extractor_variables( - depth_multiplier, pad_to_multiple, use_keras=use_keras) + depth_multiplier, pad_to_multiple, use_keras=False) self.assertEqual(len(variables), 151) - def test_has_fused_batchnorm(self, use_keras): + def test_has_fused_batchnorm(self): image_height = 40 image_width = 40 depth_multiplier = 1 @@ -266,17 +248,14 @@ class SsdMobilenetV1FeatureExtractorTest( image_placeholder = tf.placeholder(tf.float32, [1, image_height, image_width, 3]) feature_extractor = self._create_feature_extractor( - depth_multiplier, pad_to_multiple, use_keras=use_keras) + depth_multiplier, pad_to_multiple, use_keras=False) preprocessed_image = feature_extractor.preprocess(image_placeholder) - if use_keras: - _ = feature_extractor(preprocessed_image) - else: - _ = feature_extractor.extract_features(preprocessed_image) + _ = feature_extractor.extract_features(preprocessed_image) self.assertTrue( any('FusedBatchNorm' in op.type for op in tf.get_default_graph().get_operations())) - def test_extract_features_with_fewer_layers(self, use_keras): + def test_extract_features_with_fewer_layers(self): image_height = 128 image_width = 128 depth_multiplier = 1.0 @@ -286,7 +265,7 @@ class SsdMobilenetV1FeatureExtractorTest( self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=False, num_layers=4, - use_keras=use_keras) + use_keras=False) if __name__ == '__main__': diff --git a/research/object_detection/models/ssd_mobilenet_v1_feature_extractor_tf2_test.py b/research/object_detection/models/ssd_mobilenet_v1_feature_extractor_tf2_test.py new file mode 100644 index 000000000..b60537b88 --- /dev/null +++ 
b/research/object_detection/models/ssd_mobilenet_v1_feature_extractor_tf2_test.py @@ -0,0 +1,248 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Tests for SSD Mobilenet V1 feature extractors. + +By using parameterized test decorator, this test serves for both Slim-based and +Keras-based Mobilenet V1 feature extractors in SSD. +""" +import unittest +import numpy as np +import tensorflow.compat.v1 as tf + +from object_detection.models import ssd_feature_extractor_test +from object_detection.models import ssd_mobilenet_v1_keras_feature_extractor +from object_detection.utils import tf_version + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class SsdMobilenetV1FeatureExtractorTest( + ssd_feature_extractor_test.SsdFeatureExtractorTestBase): + + def _create_feature_extractor(self, + depth_multiplier, + pad_to_multiple, + use_explicit_padding=False, + num_layers=6, + is_training=False, + use_keras=False): + """Constructs a new feature extractor. + + Args: + depth_multiplier: float depth multiplier for feature extractor + pad_to_multiple: the nearest multiple to zero pad the input height and + width dimensions to. + use_explicit_padding: Use 'VALID' padding for convolutions, but prepad + inputs so that the output dimensions are the same as if 'SAME' padding + were used. + num_layers: number of SSD layers. + is_training: whether the network is in training mode. + use_keras: if True builds a keras-based feature extractor, if False builds + a slim-based one. + + Returns: + an ssd_meta_arch.SSDFeatureExtractor object. 
+ """ + del use_keras + min_depth = 32 + return (ssd_mobilenet_v1_keras_feature_extractor + .SSDMobileNetV1KerasFeatureExtractor( + is_training=is_training, + depth_multiplier=depth_multiplier, + min_depth=min_depth, + pad_to_multiple=pad_to_multiple, + conv_hyperparams=self._build_conv_hyperparams( + add_batch_norm=False), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + use_explicit_padding=use_explicit_padding, + num_layers=num_layers, + name='MobilenetV1')) + + def test_extract_features_returns_correct_shapes_128(self): + image_height = 128 + image_width = 128 + depth_multiplier = 1.0 + pad_to_multiple = 1 + expected_feature_map_shape = [(2, 8, 8, 512), (2, 4, 4, 1024), + (2, 2, 2, 512), (2, 1, 1, 256), + (2, 1, 1, 256), (2, 1, 1, 128)] + self.check_extract_features_returns_correct_shape( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=False, + use_keras=True) + self.check_extract_features_returns_correct_shape( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=True, + use_keras=True) + + def test_extract_features_returns_correct_shapes_299(self): + image_height = 299 + image_width = 299 + depth_multiplier = 1.0 + pad_to_multiple = 1 + expected_feature_map_shape = [(2, 19, 19, 512), (2, 10, 10, 1024), + (2, 5, 5, 512), (2, 3, 3, 256), + (2, 2, 2, 256), (2, 1, 1, 128)] + self.check_extract_features_returns_correct_shape( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=False, + use_keras=True) + self.check_extract_features_returns_correct_shape( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=True, + use_keras=True) + + def test_extract_features_with_dynamic_image_shape(self): + image_height = 128 + image_width = 128 + depth_multiplier = 1.0 + pad_to_multiple = 1 + expected_feature_map_shape = [(2, 8, 8, 512), (2, 4, 4, 1024), + (2, 2, 2, 512), (2, 1, 1, 256), + (2, 1, 1, 256), (2, 1, 1, 128)] + self.check_extract_features_returns_correct_shapes_with_dynamic_inputs( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=False, + use_keras=True) + self.check_extract_features_returns_correct_shape( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=True, + use_keras=True) + + def test_extract_features_returns_correct_shapes_enforcing_min_depth( + self): + image_height = 299 + image_width = 299 + depth_multiplier = 0.5**12 + pad_to_multiple = 1 + expected_feature_map_shape = [(2, 19, 19, 32), (2, 10, 10, 32), + (2, 5, 5, 32), (2, 3, 3, 32), (2, 2, 2, 32), + (2, 1, 1, 32)] + self.check_extract_features_returns_correct_shape( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=False, + use_keras=True) + self.check_extract_features_returns_correct_shape( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=True, + use_keras=True) + + def test_extract_features_returns_correct_shapes_with_pad_to_multiple( + self): + image_height = 299 + image_width = 299 + depth_multiplier = 1.0 + pad_to_multiple = 32 + expected_feature_map_shape = [(2, 20, 20, 512), (2, 10, 10, 1024), + (2, 
5, 5, 512), (2, 3, 3, 256), + (2, 2, 2, 256), (2, 1, 1, 128)] + self.check_extract_features_returns_correct_shape( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=False, + use_keras=True) + self.check_extract_features_returns_correct_shape( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=True, + use_keras=True) + + def test_extract_features_raises_error_with_invalid_image_size( + self): + image_height = 32 + image_width = 32 + depth_multiplier = 1.0 + pad_to_multiple = 1 + self.check_extract_features_raises_error_with_invalid_image_size( + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + use_keras=True) + + def test_preprocess_returns_correct_value_range(self): + image_height = 128 + image_width = 128 + depth_multiplier = 1 + pad_to_multiple = 1 + test_image = np.random.rand(2, image_height, image_width, 3) + feature_extractor = self._create_feature_extractor( + depth_multiplier, pad_to_multiple, use_keras=True) + preprocessed_image = feature_extractor.preprocess(test_image) + self.assertTrue(np.all(np.less_equal(np.abs(preprocessed_image), 1.0))) + + def test_extract_features_with_fewer_layers(self): + image_height = 128 + image_width = 128 + depth_multiplier = 1.0 + pad_to_multiple = 1 + expected_feature_map_shape = [(2, 8, 8, 512), (2, 4, 4, 1024), + (2, 2, 2, 512), (2, 1, 1, 256)] + self.check_extract_features_returns_correct_shape( + 2, image_height, image_width, depth_multiplier, pad_to_multiple, + expected_feature_map_shape, use_explicit_padding=False, num_layers=4, + use_keras=True) + + +if __name__ == '__main__': + tf.test.main() diff --git a/research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_test.py b/research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_tf1_test.py similarity index 76% rename from research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_test.py rename to research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_tf1_test.py index 131afed8a..449b7803d 100644 --- a/research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_test.py +++ b/research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_tf1_test.py @@ -18,19 +18,16 @@ By using parameterized test decorator, this test serves for both Slim-based and Keras-based Mobilenet V1 FPN feature extractors in SSD. """ -from absl.testing import parameterized +import unittest import numpy as np import tensorflow.compat.v1 as tf from object_detection.models import ssd_feature_extractor_test from object_detection.models import ssd_mobilenet_v1_fpn_feature_extractor -from object_detection.models import ssd_mobilenet_v1_fpn_keras_feature_extractor +from object_detection.utils import tf_version -@parameterized.parameters( - {'use_keras': False}, - {'use_keras': True}, -) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class SsdMobilenetV1FpnFeatureExtractorTest( ssd_feature_extractor_test.SsdFeatureExtractorTestBase): @@ -52,33 +49,19 @@ class SsdMobilenetV1FpnFeatureExtractorTest( Returns: an ssd_meta_arch.SSDFeatureExtractor object. """ + del use_keras min_depth = 32 - if use_keras: - return (ssd_mobilenet_v1_fpn_keras_feature_extractor. 
- SSDMobileNetV1FpnKerasFeatureExtractor( - is_training=is_training, - depth_multiplier=depth_multiplier, - min_depth=min_depth, - pad_to_multiple=pad_to_multiple, - conv_hyperparams=self._build_conv_hyperparams( - add_batch_norm=False), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - use_explicit_padding=use_explicit_padding, - use_depthwise=True, - name='MobilenetV1_FPN')) - else: - return (ssd_mobilenet_v1_fpn_feature_extractor. - SSDMobileNetV1FpnFeatureExtractor( - is_training, - depth_multiplier, - min_depth, - pad_to_multiple, - self.conv_hyperparams_fn, - use_depthwise=True, - use_explicit_padding=use_explicit_padding)) - - def test_extract_features_returns_correct_shapes_256(self, use_keras): + return (ssd_mobilenet_v1_fpn_feature_extractor. + SSDMobileNetV1FpnFeatureExtractor( + is_training, + depth_multiplier, + min_depth, + pad_to_multiple, + self.conv_hyperparams_fn, + use_depthwise=True, + use_explicit_padding=use_explicit_padding)) + + def test_extract_features_returns_correct_shapes_256(self): image_height = 256 image_width = 256 depth_multiplier = 1.0 @@ -89,13 +72,13 @@ class SsdMobilenetV1FpnFeatureExtractorTest( self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=False, - use_keras=use_keras) + use_keras=False) self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=True, - use_keras=use_keras) + use_keras=False) - def test_extract_features_returns_correct_shapes_384(self, use_keras): + def test_extract_features_returns_correct_shapes_384(self): image_height = 320 image_width = 320 depth_multiplier = 1.0 @@ -106,13 +89,13 @@ class SsdMobilenetV1FpnFeatureExtractorTest( self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=False, - use_keras=use_keras) + use_keras=False) self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=True, - use_keras=use_keras) + use_keras=False) - def test_extract_features_with_dynamic_image_shape(self, use_keras): + def test_extract_features_with_dynamic_image_shape(self): image_height = 256 image_width = 256 depth_multiplier = 1.0 @@ -123,14 +106,14 @@ class SsdMobilenetV1FpnFeatureExtractorTest( self.check_extract_features_returns_correct_shapes_with_dynamic_inputs( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=False, - use_keras=use_keras) + use_keras=False) self.check_extract_features_returns_correct_shapes_with_dynamic_inputs( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=True, - use_keras=use_keras) + use_keras=False) def test_extract_features_returns_correct_shapes_with_pad_to_multiple( - self, use_keras): + self): image_height = 299 image_width = 299 depth_multiplier = 1.0 @@ -141,14 +124,14 @@ class SsdMobilenetV1FpnFeatureExtractorTest( self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=False, - use_keras=use_keras) + use_keras=False) self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, 
pad_to_multiple, expected_feature_map_shape, use_explicit_padding=True, - use_keras=use_keras) + use_keras=False) def test_extract_features_returns_correct_shapes_enforcing_min_depth( - self, use_keras): + self): image_height = 256 image_width = 256 depth_multiplier = 0.5**12 @@ -159,23 +142,23 @@ class SsdMobilenetV1FpnFeatureExtractorTest( self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=False, - use_keras=use_keras) + use_keras=False) self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=True, - use_keras=use_keras) + use_keras=False) def test_extract_features_raises_error_with_invalid_image_size( - self, use_keras): + self): image_height = 32 image_width = 32 depth_multiplier = 1.0 pad_to_multiple = 1 self.check_extract_features_raises_error_with_invalid_image_size( image_height, image_width, depth_multiplier, pad_to_multiple, - use_keras=use_keras) + use_keras=False) - def test_preprocess_returns_correct_value_range(self, use_keras): + def test_preprocess_returns_correct_value_range(self): image_height = 256 image_width = 256 depth_multiplier = 1 @@ -183,25 +166,25 @@ class SsdMobilenetV1FpnFeatureExtractorTest( test_image = np.random.rand(2, image_height, image_width, 3) feature_extractor = self._create_feature_extractor(depth_multiplier, pad_to_multiple, - use_keras=use_keras) + use_keras=False) preprocessed_image = feature_extractor.preprocess(test_image) self.assertTrue(np.all(np.less_equal(np.abs(preprocessed_image), 1.0))) - def test_variables_only_created_in_scope(self, use_keras): + def test_variables_only_created_in_scope(self): depth_multiplier = 1 pad_to_multiple = 1 scope_name = 'MobilenetV1' self.check_feature_extractor_variables_under_scope( - depth_multiplier, pad_to_multiple, scope_name, use_keras=use_keras) + depth_multiplier, pad_to_multiple, scope_name, use_keras=False) - def test_variable_count(self, use_keras): + def test_variable_count(self): depth_multiplier = 1 pad_to_multiple = 1 variables = self.get_feature_extractor_variables( - depth_multiplier, pad_to_multiple, use_keras=use_keras) + depth_multiplier, pad_to_multiple, use_keras=False) self.assertEqual(len(variables), 153) - def test_fused_batchnorm(self, use_keras): + def test_fused_batchnorm(self): image_height = 256 image_width = 256 depth_multiplier = 1 @@ -210,12 +193,9 @@ class SsdMobilenetV1FpnFeatureExtractorTest( [1, image_height, image_width, 3]) feature_extractor = self._create_feature_extractor(depth_multiplier, pad_to_multiple, - use_keras=use_keras) + use_keras=False) preprocessed_image = feature_extractor.preprocess(image_placeholder) - if use_keras: - _ = feature_extractor(preprocessed_image) - else: - _ = feature_extractor.extract_features(preprocessed_image) + _ = feature_extractor.extract_features(preprocessed_image) self.assertTrue( any('FusedBatchNorm' in op.type diff --git a/research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_tf2_test.py b/research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_tf2_test.py new file mode 100644 index 000000000..307cfa8b0 --- /dev/null +++ b/research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_tf2_test.py @@ -0,0 +1,179 @@ +# Copyright 2018 The TensorFlow Authors. All Rights Reserved. 
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+"""Tests for ssd_mobilenet_v1_fpn_feature_extractor.
+
+This TF2-only test covers the Keras-based Mobilenet V1 FPN feature extractor
+in SSD; the Slim-based extractor is covered by the companion TF1 test.
+"""
+import unittest
+import numpy as np
+import tensorflow.compat.v1 as tf
+
+from object_detection.models import ssd_feature_extractor_test
+from object_detection.models import ssd_mobilenet_v1_fpn_keras_feature_extractor
+from object_detection.utils import tf_version
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class SsdMobilenetV1FpnFeatureExtractorTest(
+    ssd_feature_extractor_test.SsdFeatureExtractorTestBase):
+
+  def _create_feature_extractor(self, depth_multiplier, pad_to_multiple,
+                                is_training=True, use_explicit_padding=False,
+                                use_keras=True):
+    """Constructs a new feature extractor.
+
+    Args:
+      depth_multiplier: float depth multiplier for feature extractor
+      pad_to_multiple: the nearest multiple to zero pad the input height and
+        width dimensions to.
+      is_training: whether the network is in training mode.
+      use_explicit_padding: Use 'VALID' padding for convolutions, but prepad
+        inputs so that the output dimensions are the same as if 'SAME' padding
+        were used.
+      use_keras: unused here; this TF2-only test always builds the Keras-based
+        feature extractor.
+    Returns:
+      an ssd_meta_arch.SSDFeatureExtractor object.
+    """
+    min_depth = 32
+    del use_keras
+    return (ssd_mobilenet_v1_fpn_keras_feature_extractor.
+ SSDMobileNetV1FpnKerasFeatureExtractor( + is_training=is_training, + depth_multiplier=depth_multiplier, + min_depth=min_depth, + pad_to_multiple=pad_to_multiple, + conv_hyperparams=self._build_conv_hyperparams( + add_batch_norm=False), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + use_explicit_padding=use_explicit_padding, + use_depthwise=True, + name='MobilenetV1_FPN')) + + def test_extract_features_returns_correct_shapes_256(self): + image_height = 256 + image_width = 256 + depth_multiplier = 1.0 + pad_to_multiple = 1 + expected_feature_map_shape = [(2, 32, 32, 256), (2, 16, 16, 256), + (2, 8, 8, 256), (2, 4, 4, 256), + (2, 2, 2, 256)] + self.check_extract_features_returns_correct_shape( + 2, image_height, image_width, depth_multiplier, pad_to_multiple, + expected_feature_map_shape, use_explicit_padding=False, + use_keras=True) + self.check_extract_features_returns_correct_shape( + 2, image_height, image_width, depth_multiplier, pad_to_multiple, + expected_feature_map_shape, use_explicit_padding=True, + use_keras=True) + + def test_extract_features_returns_correct_shapes_384(self): + image_height = 320 + image_width = 320 + depth_multiplier = 1.0 + pad_to_multiple = 1 + expected_feature_map_shape = [(2, 40, 40, 256), (2, 20, 20, 256), + (2, 10, 10, 256), (2, 5, 5, 256), + (2, 3, 3, 256)] + self.check_extract_features_returns_correct_shape( + 2, image_height, image_width, depth_multiplier, pad_to_multiple, + expected_feature_map_shape, use_explicit_padding=False, + use_keras=True) + self.check_extract_features_returns_correct_shape( + 2, image_height, image_width, depth_multiplier, pad_to_multiple, + expected_feature_map_shape, use_explicit_padding=True, + use_keras=True) + + def test_extract_features_with_dynamic_image_shape(self): + image_height = 256 + image_width = 256 + depth_multiplier = 1.0 + pad_to_multiple = 1 + expected_feature_map_shape = [(2, 32, 32, 256), (2, 16, 16, 256), + (2, 8, 8, 256), (2, 4, 4, 256), + (2, 2, 2, 256)] + self.check_extract_features_returns_correct_shapes_with_dynamic_inputs( + 2, image_height, image_width, depth_multiplier, pad_to_multiple, + expected_feature_map_shape, use_explicit_padding=False, + use_keras=True) + self.check_extract_features_returns_correct_shapes_with_dynamic_inputs( + 2, image_height, image_width, depth_multiplier, pad_to_multiple, + expected_feature_map_shape, use_explicit_padding=True, + use_keras=True) + + def test_extract_features_returns_correct_shapes_with_pad_to_multiple( + self): + image_height = 299 + image_width = 299 + depth_multiplier = 1.0 + pad_to_multiple = 32 + expected_feature_map_shape = [(2, 40, 40, 256), (2, 20, 20, 256), + (2, 10, 10, 256), (2, 5, 5, 256), + (2, 3, 3, 256)] + self.check_extract_features_returns_correct_shape( + 2, image_height, image_width, depth_multiplier, pad_to_multiple, + expected_feature_map_shape, use_explicit_padding=False, + use_keras=True) + self.check_extract_features_returns_correct_shape( + 2, image_height, image_width, depth_multiplier, pad_to_multiple, + expected_feature_map_shape, use_explicit_padding=True, + use_keras=True) + + def test_extract_features_returns_correct_shapes_enforcing_min_depth( + self): + image_height = 256 + image_width = 256 + depth_multiplier = 0.5**12 + pad_to_multiple = 1 + expected_feature_map_shape = [(2, 32, 32, 32), (2, 16, 16, 32), + (2, 8, 8, 32), (2, 4, 4, 32), + (2, 2, 2, 32)] + self.check_extract_features_returns_correct_shape( + 2, image_height, image_width, depth_multiplier, pad_to_multiple, + expected_feature_map_shape, 
use_explicit_padding=False, + use_keras=True) + self.check_extract_features_returns_correct_shape( + 2, image_height, image_width, depth_multiplier, pad_to_multiple, + expected_feature_map_shape, use_explicit_padding=True, + use_keras=True) + + def test_extract_features_raises_error_with_invalid_image_size( + self): + image_height = 32 + image_width = 32 + depth_multiplier = 1.0 + pad_to_multiple = 1 + self.check_extract_features_raises_error_with_invalid_image_size( + image_height, image_width, depth_multiplier, pad_to_multiple, + use_keras=True) + + def test_preprocess_returns_correct_value_range(self): + image_height = 256 + image_width = 256 + depth_multiplier = 1 + pad_to_multiple = 1 + test_image = np.random.rand(2, image_height, image_width, 3) + feature_extractor = self._create_feature_extractor(depth_multiplier, + pad_to_multiple, + use_keras=True) + preprocessed_image = feature_extractor.preprocess(test_image) + self.assertTrue(np.all(np.less_equal(np.abs(preprocessed_image), 1.0))) + + +if __name__ == '__main__': + tf.test.main() diff --git a/research/object_detection/models/ssd_mobilenet_v1_fpn_keras_feature_extractor.py b/research/object_detection/models/ssd_mobilenet_v1_fpn_keras_feature_extractor.py index 53d3fdbd4..779293187 100644 --- a/research/object_detection/models/ssd_mobilenet_v1_fpn_keras_feature_extractor.py +++ b/research/object_detection/models/ssd_mobilenet_v1_fpn_keras_feature_extractor.py @@ -123,7 +123,7 @@ class SSDMobileNetV1FpnKerasFeatureExtractor( 'Conv2d_3_pointwise', 'Conv2d_5_pointwise', 'Conv2d_11_pointwise', 'Conv2d_13_pointwise' ] - self._mobilenet_v1 = None + self.classification_backbone = None self._fpn_features_generator = None self._coarse_feature_layers = [] @@ -147,7 +147,7 @@ class SSDMobileNetV1FpnKerasFeatureExtractor( name='conv_pw_11_relu').output conv2d_13_pointwise = full_mobilenet_v1.get_layer( name='conv_pw_13_relu').output - self._mobilenet_v1 = tf.keras.Model( + self.classification_backbone = tf.keras.Model( inputs=full_mobilenet_v1.inputs, outputs=[conv2d_3_pointwise, conv2d_5_pointwise, conv2d_11_pointwise, conv2d_13_pointwise] @@ -218,7 +218,7 @@ class SSDMobileNetV1FpnKerasFeatureExtractor( preprocessed_inputs = shape_utils.check_min_image_dim( 33, preprocessed_inputs) - image_features = self._mobilenet_v1( + image_features = self.classification_backbone( ops.pad_to_multiple(preprocessed_inputs, self._pad_to_multiple)) feature_block_list = [] @@ -243,3 +243,14 @@ class SSDMobileNetV1FpnKerasFeatureExtractor( last_feature_map = layer(last_feature_map) feature_maps.append(last_feature_map) return feature_maps + + def restore_from_classification_checkpoint_fn(self, feature_extractor_scope): + """Returns a map for restoring from an (object-based) checkpoint. + + Args: + feature_extractor_scope: A scope name for the feature extractor (unused). 
+ + Returns: + A dict mapping keys to Keras models + """ + return {'feature_extractor': self.classification_backbone} diff --git a/research/object_detection/models/ssd_mobilenet_v1_keras_feature_extractor.py b/research/object_detection/models/ssd_mobilenet_v1_keras_feature_extractor.py index 679dc25db..82b48c1a8 100644 --- a/research/object_detection/models/ssd_mobilenet_v1_keras_feature_extractor.py +++ b/research/object_detection/models/ssd_mobilenet_v1_keras_feature_extractor.py @@ -93,7 +93,7 @@ class SSDMobileNetV1KerasFeatureExtractor( 'use_explicit_padding': self._use_explicit_padding, 'use_depthwise': self._use_depthwise, } - self._mobilenet_v1 = None + self.classification_backbone = None self._feature_map_generator = None def build(self, input_shape): @@ -111,7 +111,7 @@ class SSDMobileNetV1KerasFeatureExtractor( name='conv_pw_11_relu').output conv2d_13_pointwise = full_mobilenet_v1.get_layer( name='conv_pw_13_relu').output - self._mobilenet_v1 = tf.keras.Model( + self.classification_backbone = tf.keras.Model( inputs=full_mobilenet_v1.inputs, outputs=[conv2d_11_pointwise, conv2d_13_pointwise]) self._feature_map_generator = ( @@ -155,7 +155,7 @@ class SSDMobileNetV1KerasFeatureExtractor( preprocessed_inputs = shape_utils.check_min_image_dim( 33, preprocessed_inputs) - image_features = self._mobilenet_v1( + image_features = self.classification_backbone( ops.pad_to_multiple(preprocessed_inputs, self._pad_to_multiple)) feature_maps = self._feature_map_generator({ @@ -163,3 +163,14 @@ class SSDMobileNetV1KerasFeatureExtractor( 'Conv2d_13_pointwise': image_features[1]}) return list(feature_maps.values()) + + def restore_from_classification_checkpoint_fn(self, feature_extractor_scope): + """Returns a map for restoring from an (object-based) checkpoint. + + Args: + feature_extractor_scope: A scope name for the feature extractor (unused). 
+ + Returns: + A dict mapping keys to Keras models + """ + return {'feature_extractor': self.classification_backbone} diff --git a/research/object_detection/models/ssd_mobilenet_v1_ppn_feature_extractor_test.py b/research/object_detection/models/ssd_mobilenet_v1_ppn_feature_extractor_tf1_test.py similarity index 98% rename from research/object_detection/models/ssd_mobilenet_v1_ppn_feature_extractor_test.py rename to research/object_detection/models/ssd_mobilenet_v1_ppn_feature_extractor_tf1_test.py index c5a9cd807..b5918c0df 100644 --- a/research/object_detection/models/ssd_mobilenet_v1_ppn_feature_extractor_test.py +++ b/research/object_detection/models/ssd_mobilenet_v1_ppn_feature_extractor_tf1_test.py @@ -14,13 +14,16 @@ # ============================================================================== """Tests for ssd_mobilenet_v1_ppn_feature_extractor.""" +import unittest import numpy as np import tensorflow.compat.v1 as tf from object_detection.models import ssd_feature_extractor_test from object_detection.models import ssd_mobilenet_v1_ppn_feature_extractor +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class SsdMobilenetV1PpnFeatureExtractorTest( ssd_feature_extractor_test.SsdFeatureExtractorTestBase): diff --git a/research/object_detection/models/ssd_mobilenet_v2_feature_extractor_test.py b/research/object_detection/models/ssd_mobilenet_v2_feature_extractor_tf1_test.py similarity index 70% rename from research/object_detection/models/ssd_mobilenet_v2_feature_extractor_test.py rename to research/object_detection/models/ssd_mobilenet_v2_feature_extractor_tf1_test.py index 40eee93db..96f9bc26e 100644 --- a/research/object_detection/models/ssd_mobilenet_v2_feature_extractor_test.py +++ b/research/object_detection/models/ssd_mobilenet_v2_feature_extractor_tf1_test.py @@ -14,20 +14,17 @@ # ============================================================================== """Tests for ssd_mobilenet_v2_feature_extractor.""" -from absl.testing import parameterized +import unittest import numpy as np import tensorflow.compat.v1 as tf from object_detection.models import ssd_feature_extractor_test from object_detection.models import ssd_mobilenet_v2_feature_extractor -from object_detection.models import ssd_mobilenet_v2_keras_feature_extractor +from object_detection.utils import tf_version -@parameterized.parameters( - {'use_keras': False}, - {'use_keras': True}, -) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class SsdMobilenetV2FeatureExtractorTest( ssd_feature_extractor_test.SsdFeatureExtractorTestBase): @@ -35,8 +32,7 @@ class SsdMobilenetV2FeatureExtractorTest( depth_multiplier, pad_to_multiple, use_explicit_padding=False, - num_layers=6, - use_keras=False): + num_layers=6): """Constructs a new feature extractor. Args: @@ -47,36 +43,20 @@ class SsdMobilenetV2FeatureExtractorTest( inputs so that the output dimensions are the same as if 'SAME' padding were used. num_layers: number of SSD layers. - use_keras: if True builds a keras-based feature extractor, if False builds - a slim-based one. Returns: an ssd_meta_arch.SSDFeatureExtractor object. """ min_depth = 32 - if use_keras: - return (ssd_mobilenet_v2_keras_feature_extractor. 
- SSDMobileNetV2KerasFeatureExtractor( - is_training=False, - depth_multiplier=depth_multiplier, - min_depth=min_depth, - pad_to_multiple=pad_to_multiple, - conv_hyperparams=self._build_conv_hyperparams(), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - use_explicit_padding=use_explicit_padding, - num_layers=num_layers, - name='MobilenetV2')) - else: - return ssd_mobilenet_v2_feature_extractor.SSDMobileNetV2FeatureExtractor( - False, - depth_multiplier, - min_depth, - pad_to_multiple, - self.conv_hyperparams_fn, - use_explicit_padding=use_explicit_padding, - num_layers=num_layers) - - def test_extract_features_returns_correct_shapes_128(self, use_keras): + return ssd_mobilenet_v2_feature_extractor.SSDMobileNetV2FeatureExtractor( + False, + depth_multiplier, + min_depth, + pad_to_multiple, + self.conv_hyperparams_fn, + use_explicit_padding=use_explicit_padding, + num_layers=num_layers) + + def test_extract_features_returns_correct_shapes_128(self): image_height = 128 image_width = 128 depth_multiplier = 1.0 @@ -86,10 +66,10 @@ class SsdMobilenetV2FeatureExtractorTest( (2, 1, 1, 256), (2, 1, 1, 128)] self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, - expected_feature_map_shape, use_keras=use_keras) + expected_feature_map_shape) def test_extract_features_returns_correct_shapes_128_explicit_padding( - self, use_keras): + self): image_height = 128 image_width = 128 depth_multiplier = 1.0 @@ -99,11 +79,10 @@ class SsdMobilenetV2FeatureExtractorTest( (2, 1, 1, 256), (2, 1, 1, 128)] self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, - expected_feature_map_shape, use_explicit_padding=True, - use_keras=use_keras) + expected_feature_map_shape, use_explicit_padding=True) def test_extract_features_returns_correct_shapes_with_dynamic_inputs( - self, use_keras): + self): image_height = 128 image_width = 128 depth_multiplier = 1.0 @@ -113,9 +92,9 @@ class SsdMobilenetV2FeatureExtractorTest( (2, 1, 1, 256), (2, 1, 1, 128)] self.check_extract_features_returns_correct_shapes_with_dynamic_inputs( 2, image_height, image_width, depth_multiplier, pad_to_multiple, - expected_feature_map_shape, use_keras=use_keras) + expected_feature_map_shape) - def test_extract_features_returns_correct_shapes_299(self, use_keras): + def test_extract_features_returns_correct_shapes_299(self): image_height = 299 image_width = 299 depth_multiplier = 1.0 @@ -125,10 +104,10 @@ class SsdMobilenetV2FeatureExtractorTest( (2, 2, 2, 256), (2, 1, 1, 128)] self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, - expected_feature_map_shape, use_keras=use_keras) + expected_feature_map_shape) def test_extract_features_returns_correct_shapes_enforcing_min_depth( - self, use_keras): + self): image_height = 299 image_width = 299 depth_multiplier = 0.5**12 @@ -138,10 +117,10 @@ class SsdMobilenetV2FeatureExtractorTest( (2, 2, 2, 32), (2, 1, 1, 32)] self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, - expected_feature_map_shape, use_keras=use_keras) + expected_feature_map_shape) def test_extract_features_returns_correct_shapes_with_pad_to_multiple( - self, use_keras): + self): image_height = 299 image_width = 299 depth_multiplier = 1.0 @@ -151,45 +130,43 @@ class SsdMobilenetV2FeatureExtractorTest( (2, 2, 2, 256), (2, 1, 1, 128)] self.check_extract_features_returns_correct_shape( 
2, image_height, image_width, depth_multiplier, pad_to_multiple, - expected_feature_map_shape, use_keras=use_keras) + expected_feature_map_shape) def test_extract_features_raises_error_with_invalid_image_size( - self, use_keras): + self): image_height = 32 image_width = 32 depth_multiplier = 1.0 pad_to_multiple = 1 self.check_extract_features_raises_error_with_invalid_image_size( - image_height, image_width, depth_multiplier, pad_to_multiple, - use_keras=use_keras) + image_height, image_width, depth_multiplier, pad_to_multiple) - def test_preprocess_returns_correct_value_range(self, use_keras): + def test_preprocess_returns_correct_value_range(self): image_height = 128 image_width = 128 depth_multiplier = 1 pad_to_multiple = 1 test_image = np.random.rand(4, image_height, image_width, 3) feature_extractor = self._create_feature_extractor(depth_multiplier, - pad_to_multiple, - use_keras=use_keras) + pad_to_multiple) preprocessed_image = feature_extractor.preprocess(test_image) self.assertTrue(np.all(np.less_equal(np.abs(preprocessed_image), 1.0))) - def test_variables_only_created_in_scope(self, use_keras): + def test_variables_only_created_in_scope(self): depth_multiplier = 1 pad_to_multiple = 1 scope_name = 'MobilenetV2' self.check_feature_extractor_variables_under_scope( - depth_multiplier, pad_to_multiple, scope_name, use_keras=use_keras) + depth_multiplier, pad_to_multiple, scope_name) - def test_variable_count(self, use_keras): + def test_variable_count(self): depth_multiplier = 1 pad_to_multiple = 1 variables = self.get_feature_extractor_variables( - depth_multiplier, pad_to_multiple, use_keras=use_keras) + depth_multiplier, pad_to_multiple) self.assertEqual(len(variables), 292) - def test_has_fused_batchnorm(self, use_keras): + def test_has_fused_batchnorm(self): image_height = 40 image_width = 40 depth_multiplier = 1 @@ -197,17 +174,13 @@ class SsdMobilenetV2FeatureExtractorTest( image_placeholder = tf.placeholder(tf.float32, [1, image_height, image_width, 3]) feature_extractor = self._create_feature_extractor(depth_multiplier, - pad_to_multiple, - use_keras=use_keras) + pad_to_multiple) preprocessed_image = feature_extractor.preprocess(image_placeholder) - if use_keras: - _ = feature_extractor(preprocessed_image) - else: - _ = feature_extractor.extract_features(preprocessed_image) + _ = feature_extractor.extract_features(preprocessed_image) self.assertTrue(any('FusedBatchNorm' in op.type for op in tf.get_default_graph().get_operations())) - def test_extract_features_with_fewer_layers(self, use_keras): + def test_extract_features_with_fewer_layers(self): image_height = 128 image_width = 128 depth_multiplier = 1.0 @@ -216,8 +189,7 @@ class SsdMobilenetV2FeatureExtractorTest( (2, 2, 2, 512), (2, 1, 1, 256)] self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, - expected_feature_map_shape, use_explicit_padding=False, num_layers=4, - use_keras=use_keras) + expected_feature_map_shape, use_explicit_padding=False, num_layers=4) if __name__ == '__main__': diff --git a/research/object_detection/models/ssd_mobilenet_v2_feature_extractor_tf2_test.py b/research/object_detection/models/ssd_mobilenet_v2_feature_extractor_tf2_test.py new file mode 100644 index 000000000..6d4cb5afc --- /dev/null +++ b/research/object_detection/models/ssd_mobilenet_v2_feature_extractor_tf2_test.py @@ -0,0 +1,192 @@ +# Copyright 2018 The TensorFlow Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +"""Tests for ssd_mobilenet_v2_feature_extractor.""" +import unittest + +import numpy as np +import tensorflow.compat.v1 as tf + +from object_detection.models import ssd_feature_extractor_test +from object_detection.models import ssd_mobilenet_v2_keras_feature_extractor +from object_detection.utils import tf_version + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class SsdMobilenetV2FeatureExtractorTest( + ssd_feature_extractor_test.SsdFeatureExtractorTestBase): + + def _create_feature_extractor(self, + depth_multiplier, + pad_to_multiple, + use_explicit_padding=False, + num_layers=6, + use_keras=False): + """Constructs a new feature extractor. + + Args: + depth_multiplier: float depth multiplier for feature extractor + pad_to_multiple: the nearest multiple to zero pad the input height and + width dimensions to. + use_explicit_padding: use 'VALID' padding for convolutions, but prepad + inputs so that the output dimensions are the same as if 'SAME' padding + were used. + num_layers: number of SSD layers. + use_keras: unused argument. + + Returns: + an ssd_meta_arch.SSDFeatureExtractor object. + """ + del use_keras + min_depth = 32 + return (ssd_mobilenet_v2_keras_feature_extractor. 
+ SSDMobileNetV2KerasFeatureExtractor( + is_training=False, + depth_multiplier=depth_multiplier, + min_depth=min_depth, + pad_to_multiple=pad_to_multiple, + conv_hyperparams=self._build_conv_hyperparams(), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + use_explicit_padding=use_explicit_padding, + num_layers=num_layers, + name='MobilenetV2')) + + def test_extract_features_returns_correct_shapes_128(self): + image_height = 128 + image_width = 128 + depth_multiplier = 1.0 + pad_to_multiple = 1 + expected_feature_map_shape = [(2, 8, 8, 576), (2, 4, 4, 1280), + (2, 2, 2, 512), (2, 1, 1, 256), + (2, 1, 1, 256), (2, 1, 1, 128)] + self.check_extract_features_returns_correct_shape( + 2, image_height, image_width, depth_multiplier, pad_to_multiple, + expected_feature_map_shape, use_keras=True) + + def test_extract_features_returns_correct_shapes_128_explicit_padding( + self): + image_height = 128 + image_width = 128 + depth_multiplier = 1.0 + pad_to_multiple = 1 + expected_feature_map_shape = [(2, 8, 8, 576), (2, 4, 4, 1280), + (2, 2, 2, 512), (2, 1, 1, 256), + (2, 1, 1, 256), (2, 1, 1, 128)] + self.check_extract_features_returns_correct_shape( + 2, image_height, image_width, depth_multiplier, pad_to_multiple, + expected_feature_map_shape, use_explicit_padding=True, use_keras=True) + + def test_extract_features_returns_correct_shapes_with_dynamic_inputs( + self): + image_height = 128 + image_width = 128 + depth_multiplier = 1.0 + pad_to_multiple = 1 + expected_feature_map_shape = [(2, 8, 8, 576), (2, 4, 4, 1280), + (2, 2, 2, 512), (2, 1, 1, 256), + (2, 1, 1, 256), (2, 1, 1, 128)] + self.check_extract_features_returns_correct_shapes_with_dynamic_inputs( + 2, image_height, image_width, depth_multiplier, pad_to_multiple, + expected_feature_map_shape, use_keras=True) + + def test_extract_features_returns_correct_shapes_299(self): + image_height = 299 + image_width = 299 + depth_multiplier = 1.0 + pad_to_multiple = 1 + expected_feature_map_shape = [(2, 19, 19, 576), (2, 10, 10, 1280), + (2, 5, 5, 512), (2, 3, 3, 256), + (2, 2, 2, 256), (2, 1, 1, 128)] + self.check_extract_features_returns_correct_shape( + 2, image_height, image_width, depth_multiplier, pad_to_multiple, + expected_feature_map_shape, use_keras=True) + + def test_extract_features_returns_correct_shapes_enforcing_min_depth( + self): + image_height = 299 + image_width = 299 + depth_multiplier = 0.5**12 + pad_to_multiple = 1 + expected_feature_map_shape = [(2, 19, 19, 192), (2, 10, 10, 32), + (2, 5, 5, 32), (2, 3, 3, 32), + (2, 2, 2, 32), (2, 1, 1, 32)] + self.check_extract_features_returns_correct_shape( + 2, image_height, image_width, depth_multiplier, pad_to_multiple, + expected_feature_map_shape, use_keras=True) + + def test_extract_features_returns_correct_shapes_with_pad_to_multiple( + self): + image_height = 299 + image_width = 299 + depth_multiplier = 1.0 + pad_to_multiple = 32 + expected_feature_map_shape = [(2, 20, 20, 576), (2, 10, 10, 1280), + (2, 5, 5, 512), (2, 3, 3, 256), + (2, 2, 2, 256), (2, 1, 1, 128)] + self.check_extract_features_returns_correct_shape( + 2, image_height, image_width, depth_multiplier, pad_to_multiple, + expected_feature_map_shape, use_keras=True) + + def test_extract_features_raises_error_with_invalid_image_size( + self): + image_height = 32 + image_width = 32 + depth_multiplier = 1.0 + pad_to_multiple = 1 + self.check_extract_features_raises_error_with_invalid_image_size( + image_height, image_width, depth_multiplier, pad_to_multiple, + use_keras=True) + + def 
test_preprocess_returns_correct_value_range(self): + image_height = 128 + image_width = 128 + depth_multiplier = 1 + pad_to_multiple = 1 + test_image = np.random.rand(4, image_height, image_width, 3) + feature_extractor = self._create_feature_extractor(depth_multiplier, + pad_to_multiple) + preprocessed_image = feature_extractor.preprocess(test_image) + self.assertTrue(np.all(np.less_equal(np.abs(preprocessed_image), 1.0))) + + def test_variables_only_created_in_scope(self): + depth_multiplier = 1 + pad_to_multiple = 1 + scope_name = 'MobilenetV2' + self.check_feature_extractor_variables_under_scope( + depth_multiplier, pad_to_multiple, scope_name, use_keras=True) + + def test_variable_count(self): + depth_multiplier = 1 + pad_to_multiple = 1 + variables = self.get_feature_extractor_variables( + depth_multiplier, pad_to_multiple, use_keras=True) + self.assertEqual(len(variables), 292) + + def test_extract_features_with_fewer_layers(self): + image_height = 128 + image_width = 128 + depth_multiplier = 1.0 + pad_to_multiple = 1 + expected_feature_map_shape = [(2, 8, 8, 576), (2, 4, 4, 1280), + (2, 2, 2, 512), (2, 1, 1, 256)] + self.check_extract_features_returns_correct_shape( + 2, image_height, image_width, depth_multiplier, pad_to_multiple, + expected_feature_map_shape, use_explicit_padding=False, num_layers=4, + use_keras=True) + + +if __name__ == '__main__': + tf.test.main() diff --git a/research/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor_test.py b/research/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor_tf1_test.py similarity index 70% rename from research/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor_test.py rename to research/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor_tf1_test.py index f5bb42b68..9cdbed5fb 100644 --- a/research/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor_test.py +++ b/research/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor_tf1_test.py @@ -18,31 +18,23 @@ By using parameterized test decorator, this test serves for both Slim-based and Keras-based Mobilenet V2 FPN feature extractors in SSD. """ +import unittest from absl.testing import parameterized import numpy as np import tensorflow.compat.v1 as tf from object_detection.models import ssd_feature_extractor_test from object_detection.models import ssd_mobilenet_v2_fpn_feature_extractor -from object_detection.models import ssd_mobilenet_v2_fpn_keras_feature_extractor +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') @parameterized.parameters( { - 'use_depthwise': False, - 'use_keras': True + 'use_depthwise': False }, { - 'use_depthwise': True, - 'use_keras': True - }, - { - 'use_depthwise': False, - 'use_keras': False - }, - { - 'use_depthwise': True, - 'use_keras': False + 'use_depthwise': True }, ) class SsdMobilenetV2FpnFeatureExtractorTest( @@ -71,34 +63,20 @@ class SsdMobilenetV2FpnFeatureExtractorTest( Returns: an ssd_meta_arch.SSDFeatureExtractor object. 
""" + del use_keras min_depth = 32 - if use_keras: - return (ssd_mobilenet_v2_fpn_keras_feature_extractor - .SSDMobileNetV2FpnKerasFeatureExtractor( - is_training=is_training, - depth_multiplier=depth_multiplier, - min_depth=min_depth, - pad_to_multiple=pad_to_multiple, - conv_hyperparams=self._build_conv_hyperparams( - add_batch_norm=False), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - use_explicit_padding=use_explicit_padding, - use_depthwise=use_depthwise, - name='MobilenetV2_FPN')) - else: - return (ssd_mobilenet_v2_fpn_feature_extractor - .SSDMobileNetV2FpnFeatureExtractor( - is_training, - depth_multiplier, - min_depth, - pad_to_multiple, - self.conv_hyperparams_fn, - use_depthwise=use_depthwise, - use_explicit_padding=use_explicit_padding)) + return (ssd_mobilenet_v2_fpn_feature_extractor + .SSDMobileNetV2FpnFeatureExtractor( + is_training, + depth_multiplier, + min_depth, + pad_to_multiple, + self.conv_hyperparams_fn, + use_depthwise=use_depthwise, + use_explicit_padding=use_explicit_padding)) - def test_extract_features_returns_correct_shapes_256(self, use_keras, - use_depthwise): + def test_extract_features_returns_correct_shapes_256(self, use_depthwise): + use_keras = False image_height = 256 image_width = 256 depth_multiplier = 1.0 @@ -127,8 +105,8 @@ class SsdMobilenetV2FpnFeatureExtractorTest( use_keras=use_keras, use_depthwise=use_depthwise) - def test_extract_features_returns_correct_shapes_384(self, use_keras, - use_depthwise): + def test_extract_features_returns_correct_shapes_384(self, use_depthwise): + use_keras = False image_height = 320 image_width = 320 depth_multiplier = 1.0 @@ -157,8 +135,9 @@ class SsdMobilenetV2FpnFeatureExtractorTest( use_keras=use_keras, use_depthwise=use_depthwise) - def test_extract_features_with_dynamic_image_shape(self, use_keras, + def test_extract_features_with_dynamic_image_shape(self, use_depthwise): + use_keras = False image_height = 256 image_width = 256 depth_multiplier = 1.0 @@ -188,7 +167,8 @@ class SsdMobilenetV2FpnFeatureExtractorTest( use_depthwise=use_depthwise) def test_extract_features_returns_correct_shapes_with_pad_to_multiple( - self, use_keras, use_depthwise): + self, use_depthwise): + use_keras = False image_height = 299 image_width = 299 depth_multiplier = 1.0 @@ -218,7 +198,8 @@ class SsdMobilenetV2FpnFeatureExtractorTest( use_depthwise=use_depthwise) def test_extract_features_returns_correct_shapes_enforcing_min_depth( - self, use_keras, use_depthwise): + self, use_depthwise): + use_keras = False image_height = 256 image_width = 256 depth_multiplier = 0.5**12 @@ -248,7 +229,8 @@ class SsdMobilenetV2FpnFeatureExtractorTest( use_depthwise=use_depthwise) def test_extract_features_raises_error_with_invalid_image_size( - self, use_keras, use_depthwise): + self, use_depthwise): + use_keras = False image_height = 32 image_width = 32 depth_multiplier = 1.0 @@ -261,8 +243,9 @@ class SsdMobilenetV2FpnFeatureExtractorTest( use_keras=use_keras, use_depthwise=use_depthwise) - def test_preprocess_returns_correct_value_range(self, use_keras, + def test_preprocess_returns_correct_value_range(self, use_depthwise): + use_keras = False image_height = 256 image_width = 256 depth_multiplier = 1 @@ -276,7 +259,8 @@ class SsdMobilenetV2FpnFeatureExtractorTest( preprocessed_image = feature_extractor.preprocess(test_image) self.assertTrue(np.all(np.less_equal(np.abs(preprocessed_image), 1.0))) - def test_variables_only_created_in_scope(self, use_keras, use_depthwise): + def test_variables_only_created_in_scope(self, 
use_depthwise): + use_keras = False depth_multiplier = 1 pad_to_multiple = 1 scope_name = 'MobilenetV2' @@ -287,7 +271,8 @@ class SsdMobilenetV2FpnFeatureExtractorTest( use_keras=use_keras, use_depthwise=use_depthwise) - def test_fused_batchnorm(self, use_keras, use_depthwise): + def test_fused_batchnorm(self, use_depthwise): + use_keras = False image_height = 256 image_width = 256 depth_multiplier = 1 @@ -300,15 +285,13 @@ class SsdMobilenetV2FpnFeatureExtractorTest( use_keras=use_keras, use_depthwise=use_depthwise) preprocessed_image = feature_extractor.preprocess(image_placeholder) - if use_keras: - _ = feature_extractor(preprocessed_image) - else: - _ = feature_extractor.extract_features(preprocessed_image) + _ = feature_extractor.extract_features(preprocessed_image) self.assertTrue( any('FusedBatchNorm' in op.type for op in tf.get_default_graph().get_operations())) - def test_variable_count(self, use_keras, use_depthwise): + def test_variable_count(self, use_depthwise): + use_keras = False depth_multiplier = 1 pad_to_multiple = 1 variables = self.get_feature_extractor_variables( @@ -321,8 +304,9 @@ class SsdMobilenetV2FpnFeatureExtractorTest( expected_variables_len = 278 self.assertEqual(len(variables), expected_variables_len) - def test_get_expected_feature_map_variable_names(self, use_keras, + def test_get_expected_feature_map_variable_names(self, use_depthwise): + use_keras = False depth_multiplier = 1.0 pad_to_multiple = 1 @@ -360,44 +344,6 @@ class SsdMobilenetV2FpnFeatureExtractorTest( 'MobilenetV2/fpn/projection_2/weights', 'MobilenetV2/fpn/projection_3/weights', ]) - keras_expected_feature_maps_variables = set([ - # Keras Mobilenet V2 feature maps - 'MobilenetV2_FPN/block_4_depthwise/depthwise_kernel', - 'MobilenetV2_FPN/block_7_depthwise/depthwise_kernel', - 'MobilenetV2_FPN/block_14_depthwise/depthwise_kernel', - 'MobilenetV2_FPN/Conv_1/kernel', - # FPN layers - 'MobilenetV2_FPN/bottom_up_Conv2d_20_conv/kernel', - 'MobilenetV2_FPN/bottom_up_Conv2d_21_conv/kernel', - 'MobilenetV2_FPN/FeatureMaps/top_down/smoothing_1_conv/kernel', - 'MobilenetV2_FPN/FeatureMaps/top_down/smoothing_2_conv/kernel', - 'MobilenetV2_FPN/FeatureMaps/top_down/projection_1/kernel', - 'MobilenetV2_FPN/FeatureMaps/top_down/projection_2/kernel', - 'MobilenetV2_FPN/FeatureMaps/top_down/projection_3/kernel' - ]) - keras_expected_feature_maps_variables_with_depthwise = set([ - # Keras Mobilenet V2 feature maps - 'MobilenetV2_FPN/block_4_depthwise/depthwise_kernel', - 'MobilenetV2_FPN/block_7_depthwise/depthwise_kernel', - 'MobilenetV2_FPN/block_14_depthwise/depthwise_kernel', - 'MobilenetV2_FPN/Conv_1/kernel', - # FPN layers - 'MobilenetV2_FPN/bottom_up_Conv2d_20_depthwise_conv/depthwise_kernel', - 'MobilenetV2_FPN/bottom_up_Conv2d_20_depthwise_conv/pointwise_kernel', - 'MobilenetV2_FPN/bottom_up_Conv2d_21_depthwise_conv/depthwise_kernel', - 'MobilenetV2_FPN/bottom_up_Conv2d_21_depthwise_conv/pointwise_kernel', - ('MobilenetV2_FPN/FeatureMaps/top_down/smoothing_1_depthwise_conv/' - 'depthwise_kernel'), - ('MobilenetV2_FPN/FeatureMaps/top_down/smoothing_1_depthwise_conv/' - 'pointwise_kernel'), - ('MobilenetV2_FPN/FeatureMaps/top_down/smoothing_2_depthwise_conv/' - 'depthwise_kernel'), - ('MobilenetV2_FPN/FeatureMaps/top_down/smoothing_2_depthwise_conv/' - 'pointwise_kernel'), - 'MobilenetV2_FPN/FeatureMaps/top_down/projection_1/kernel', - 'MobilenetV2_FPN/FeatureMaps/top_down/projection_2/kernel', - 'MobilenetV2_FPN/FeatureMaps/top_down/projection_3/kernel' - ]) g = tf.Graph() with g.as_default(): @@ -407,18 
+353,12 @@ class SsdMobilenetV2FpnFeatureExtractorTest( pad_to_multiple, use_keras=use_keras, use_depthwise=use_depthwise) - if use_keras: - _ = feature_extractor(preprocessed_inputs) - expected_feature_maps_variables = keras_expected_feature_maps_variables - if use_depthwise: - expected_feature_maps_variables = ( - keras_expected_feature_maps_variables_with_depthwise) - else: - _ = feature_extractor.extract_features(preprocessed_inputs) - expected_feature_maps_variables = slim_expected_feature_maps_variables - if use_depthwise: - expected_feature_maps_variables = ( - slim_expected_feature_maps_variables_with_depthwise) + + _ = feature_extractor.extract_features(preprocessed_inputs) + expected_feature_maps_variables = slim_expected_feature_maps_variables + if use_depthwise: + expected_feature_maps_variables = ( + slim_expected_feature_maps_variables_with_depthwise) actual_variable_set = set([ var.op.name for var in g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES) ]) diff --git a/research/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor_tf2_test.py b/research/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor_tf2_test.py new file mode 100644 index 000000000..44522ac94 --- /dev/null +++ b/research/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor_tf2_test.py @@ -0,0 +1,269 @@ +# Copyright 2018 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +"""Tests for ssd_mobilenet_v2_fpn_feature_extractor. + +By using parameterized test decorator, this test serves for both Slim-based and +Keras-based Mobilenet V2 FPN feature extractors in SSD. +""" +import unittest +from absl.testing import parameterized +import numpy as np +import tensorflow.compat.v1 as tf + +from object_detection.models import ssd_feature_extractor_test +from object_detection.models import ssd_mobilenet_v2_fpn_keras_feature_extractor +from object_detection.utils import tf_version + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +@parameterized.parameters( + { + 'use_depthwise': False, + }, + { + 'use_depthwise': True, + }, +) +class SsdMobilenetV2FpnFeatureExtractorTest( + ssd_feature_extractor_test.SsdFeatureExtractorTestBase): + + def _create_feature_extractor(self, + depth_multiplier, + pad_to_multiple, + is_training=True, + use_explicit_padding=False, + use_keras=False, + use_depthwise=False): + """Constructs a new feature extractor. + + Args: + depth_multiplier: float depth multiplier for feature extractor + pad_to_multiple: the nearest multiple to zero pad the input height and + width dimensions to. + is_training: whether the network is in training mode. + use_explicit_padding: Use 'VALID' padding for convolutions, but prepad + inputs so that the output dimensions are the same as if 'SAME' padding + were used. + use_keras: if True builds a keras-based feature extractor, if False builds + a slim-based one. 
+ use_depthwise: Whether to use depthwise convolutions. + Returns: + an ssd_meta_arch.SSDFeatureExtractor object. + """ + del use_keras + min_depth = 32 + return (ssd_mobilenet_v2_fpn_keras_feature_extractor + .SSDMobileNetV2FpnKerasFeatureExtractor( + is_training=is_training, + depth_multiplier=depth_multiplier, + min_depth=min_depth, + pad_to_multiple=pad_to_multiple, + conv_hyperparams=self._build_conv_hyperparams( + add_batch_norm=False), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + use_explicit_padding=use_explicit_padding, + use_depthwise=use_depthwise, + name='MobilenetV2_FPN')) + + def test_extract_features_returns_correct_shapes_256(self, + use_depthwise): + use_keras = True + image_height = 256 + image_width = 256 + depth_multiplier = 1.0 + pad_to_multiple = 1 + expected_feature_map_shape = [(2, 32, 32, 256), (2, 16, 16, 256), + (2, 8, 8, 256), (2, 4, 4, 256), + (2, 2, 2, 256)] + self.check_extract_features_returns_correct_shape( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=False, + use_keras=use_keras, + use_depthwise=use_depthwise) + self.check_extract_features_returns_correct_shape( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=True, + use_keras=use_keras, + use_depthwise=use_depthwise) + + def test_extract_features_returns_correct_shapes_384(self, + use_depthwise): + use_keras = True + image_height = 320 + image_width = 320 + depth_multiplier = 1.0 + pad_to_multiple = 1 + expected_feature_map_shape = [(2, 40, 40, 256), (2, 20, 20, 256), + (2, 10, 10, 256), (2, 5, 5, 256), + (2, 3, 3, 256)] + self.check_extract_features_returns_correct_shape( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=False, + use_keras=use_keras, + use_depthwise=use_depthwise) + self.check_extract_features_returns_correct_shape( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=True, + use_keras=use_keras, + use_depthwise=use_depthwise) + + def test_extract_features_with_dynamic_image_shape(self, + use_depthwise): + use_keras = True + image_height = 256 + image_width = 256 + depth_multiplier = 1.0 + pad_to_multiple = 1 + expected_feature_map_shape = [(2, 32, 32, 256), (2, 16, 16, 256), + (2, 8, 8, 256), (2, 4, 4, 256), + (2, 2, 2, 256)] + self.check_extract_features_returns_correct_shapes_with_dynamic_inputs( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=False, + use_keras=use_keras, + use_depthwise=use_depthwise) + self.check_extract_features_returns_correct_shapes_with_dynamic_inputs( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=True, + use_keras=use_keras, + use_depthwise=use_depthwise) + + def test_extract_features_returns_correct_shapes_with_pad_to_multiple( + self, use_depthwise): + use_keras = True + image_height = 299 + image_width = 299 + depth_multiplier = 1.0 + pad_to_multiple = 32 + expected_feature_map_shape = [(2, 40, 40, 256), (2, 20, 20, 256), + (2, 10, 10, 256), (2, 5, 5, 256), + (2, 3, 3, 256)] + self.check_extract_features_returns_correct_shape( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=False, + 
use_keras=use_keras, + use_depthwise=use_depthwise) + self.check_extract_features_returns_correct_shape( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=True, + use_keras=use_keras, + use_depthwise=use_depthwise) + + def test_extract_features_returns_correct_shapes_enforcing_min_depth( + self, use_depthwise): + use_keras = True + image_height = 256 + image_width = 256 + depth_multiplier = 0.5**12 + pad_to_multiple = 1 + expected_feature_map_shape = [(2, 32, 32, 32), (2, 16, 16, 32), + (2, 8, 8, 32), (2, 4, 4, 32), + (2, 2, 2, 32)] + self.check_extract_features_returns_correct_shape( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=False, + use_keras=use_keras, + use_depthwise=use_depthwise) + self.check_extract_features_returns_correct_shape( + 2, + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + expected_feature_map_shape, + use_explicit_padding=True, + use_keras=use_keras, + use_depthwise=use_depthwise) + + def test_extract_features_raises_error_with_invalid_image_size( + self, use_depthwise=False): + use_keras = True + image_height = 32 + image_width = 32 + depth_multiplier = 1.0 + pad_to_multiple = 1 + self.check_extract_features_raises_error_with_invalid_image_size( + image_height, + image_width, + depth_multiplier, + pad_to_multiple, + use_keras=use_keras, + use_depthwise=use_depthwise) + + def test_preprocess_returns_correct_value_range(self, + use_depthwise): + use_keras = True + image_height = 256 + image_width = 256 + depth_multiplier = 1 + pad_to_multiple = 1 + test_image = np.random.rand(2, image_height, image_width, 3) + feature_extractor = self._create_feature_extractor( + depth_multiplier, + pad_to_multiple, + use_keras=use_keras, + use_depthwise=use_depthwise) + preprocessed_image = feature_extractor.preprocess(test_image) + self.assertTrue(np.all(np.less_equal(np.abs(preprocessed_image), 1.0))) + +if __name__ == '__main__': + tf.test.main() diff --git a/research/object_detection/models/ssd_mobilenet_v2_fpn_keras_feature_extractor.py b/research/object_detection/models/ssd_mobilenet_v2_fpn_keras_feature_extractor.py index f01bec9c5..0e36e8bda 100644 --- a/research/object_detection/models/ssd_mobilenet_v2_fpn_keras_feature_extractor.py +++ b/research/object_detection/models/ssd_mobilenet_v2_fpn_keras_feature_extractor.py @@ -123,7 +123,7 @@ class SSDMobileNetV2FpnKerasFeatureExtractor( self._conv_defs = _create_modified_mobilenet_config() self._use_native_resize_op = use_native_resize_op self._feature_blocks = ['layer_4', 'layer_7', 'layer_14', 'layer_19'] - self._mobilenet_v2 = None + self.classification_backbone = None self._fpn_features_generator = None self._coarse_feature_layers = [] @@ -147,7 +147,7 @@ class SSDMobileNetV2FpnKerasFeatureExtractor( outputs.append(full_mobilenet_v2.get_layer(output_layer_name).output) layer_19 = full_mobilenet_v2.get_layer(name='out_relu').output outputs.append(layer_19) - self._mobilenet_v2 = tf.keras.Model( + self.classification_backbone = tf.keras.Model( inputs=full_mobilenet_v2.inputs, outputs=outputs) # pylint:disable=g-long-lambda @@ -216,7 +216,7 @@ class SSDMobileNetV2FpnKerasFeatureExtractor( preprocessed_inputs = shape_utils.check_min_image_dim( 33, preprocessed_inputs) - image_features = self._mobilenet_v2( + image_features = self.classification_backbone( ops.pad_to_multiple(preprocessed_inputs, self._pad_to_multiple)) feature_block_list = [] @@ 
-241,3 +241,14 @@ class SSDMobileNetV2FpnKerasFeatureExtractor( last_feature_map = layer(last_feature_map) feature_maps.append(last_feature_map) return feature_maps + + def restore_from_classification_checkpoint_fn(self, feature_extractor_scope): + """Returns a map for restoring from an (object-based) checkpoint. + + Args: + feature_extractor_scope: A scope name for the feature extractor (unused). + + Returns: + A dict mapping keys to Keras models + """ + return {'feature_extractor': self.classification_backbone} diff --git a/research/object_detection/models/ssd_mobilenet_v2_keras_feature_extractor.py b/research/object_detection/models/ssd_mobilenet_v2_keras_feature_extractor.py index e9260cd7a..9f0622f32 100644 --- a/research/object_detection/models/ssd_mobilenet_v2_keras_feature_extractor.py +++ b/research/object_detection/models/ssd_mobilenet_v2_keras_feature_extractor.py @@ -97,7 +97,7 @@ class SSDMobileNetV2KerasFeatureExtractor( 'use_explicit_padding': self._use_explicit_padding, } - self.mobilenet_v2 = None + self.classification_backbone = None self.feature_map_generator = None def build(self, input_shape): @@ -114,7 +114,7 @@ class SSDMobileNetV2KerasFeatureExtractor( conv2d_11_pointwise = full_mobilenet_v2.get_layer( name='block_13_expand_relu').output conv2d_13_pointwise = full_mobilenet_v2.get_layer(name='out_relu').output - self.mobilenet_v2 = tf.keras.Model( + self.classification_backbone = tf.keras.Model( inputs=full_mobilenet_v2.inputs, outputs=[conv2d_11_pointwise, conv2d_13_pointwise]) self.feature_map_generator = ( @@ -158,7 +158,7 @@ class SSDMobileNetV2KerasFeatureExtractor( preprocessed_inputs = shape_utils.check_min_image_dim( 33, preprocessed_inputs) - image_features = self.mobilenet_v2( + image_features = self.classification_backbone( ops.pad_to_multiple(preprocessed_inputs, self._pad_to_multiple)) feature_maps = self.feature_map_generator({ @@ -166,3 +166,14 @@ class SSDMobileNetV2KerasFeatureExtractor( 'layer_19': image_features[1]}) return list(feature_maps.values()) + + def restore_from_classification_checkpoint_fn(self, feature_extractor_scope): + """Returns a map for restoring from an (object-based) checkpoint. + + Args: + feature_extractor_scope: A scope name for the feature extractor (unused). + + Returns: + A dict mapping keys to Keras models + """ + return {'feature_extractor': self.classification_backbone} diff --git a/research/object_detection/models/ssd_mobilenet_v2_mnasfpn_feature_extractor_test.py b/research/object_detection/models/ssd_mobilenet_v2_mnasfpn_feature_extractor_tf1_test.py similarity index 96% rename from research/object_detection/models/ssd_mobilenet_v2_mnasfpn_feature_extractor_test.py rename to research/object_detection/models/ssd_mobilenet_v2_mnasfpn_feature_extractor_tf1_test.py index dd9aae976..032433128 100644 --- a/research/object_detection/models/ssd_mobilenet_v2_mnasfpn_feature_extractor_test.py +++ b/research/object_detection/models/ssd_mobilenet_v2_mnasfpn_feature_extractor_tf1_test.py @@ -14,13 +14,16 @@ # limitations under the License. 
# ============================================================================== """Tests for ssd_mobilenet_v2_nas_fpn_feature_extractor.""" +import unittest import numpy as np import tensorflow.compat.v1 as tf from object_detection.models import ssd_feature_extractor_test from object_detection.models import ssd_mobilenet_v2_mnasfpn_feature_extractor as mnasfpn_feature_extractor +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class SsdMobilenetV2MnasFPNFeatureExtractorTest( ssd_feature_extractor_test.SsdFeatureExtractorTestBase): diff --git a/research/object_detection/models/ssd_mobilenet_v3_feature_extractor_test.py b/research/object_detection/models/ssd_mobilenet_v3_feature_extractor_tf1_test.py similarity index 95% rename from research/object_detection/models/ssd_mobilenet_v3_feature_extractor_test.py rename to research/object_detection/models/ssd_mobilenet_v3_feature_extractor_tf1_test.py index 38621744d..43c02490a 100644 --- a/research/object_detection/models/ssd_mobilenet_v3_feature_extractor_test.py +++ b/research/object_detection/models/ssd_mobilenet_v3_feature_extractor_tf1_test.py @@ -13,17 +13,15 @@ # limitations under the License. # ============================================================================== """Tests for ssd_mobilenet_v3_feature_extractor.""" - +import unittest import tensorflow.compat.v1 as tf -import tf_slim as slim from object_detection.models import ssd_mobilenet_v3_feature_extractor from object_detection.models import ssd_mobilenet_v3_feature_extractor_testbase +from object_detection.utils import tf_version -slim = slim - - +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class SsdMobilenetV3LargeFeatureExtractorTest( ssd_mobilenet_v3_feature_extractor_testbase ._SsdMobilenetV3FeatureExtractorTestBase): @@ -63,6 +61,7 @@ class SsdMobilenetV3LargeFeatureExtractorTest( use_explicit_padding=use_explicit_padding)) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class SsdMobilenetV3SmallFeatureExtractorTest( ssd_mobilenet_v3_feature_extractor_testbase ._SsdMobilenetV3FeatureExtractorTestBase): diff --git a/research/object_detection/models/ssd_pnasnet_feature_extractor.py b/research/object_detection/models/ssd_pnasnet_feature_extractor.py index 802c83948..48f1dee3b 100644 --- a/research/object_detection/models/ssd_pnasnet_feature_extractor.py +++ b/research/object_detection/models/ssd_pnasnet_feature_extractor.py @@ -27,7 +27,10 @@ from object_detection.models import feature_map_generators from object_detection.utils import context_manager from object_detection.utils import ops from object_detection.utils import variables_helper -from nets.nasnet import pnasnet +try: + from nets.nasnet import pnasnet # pylint: disable=g-import-not-at-top +except: # pylint: disable=bare-except + pass def pnasnet_large_arg_scope_for_detection(is_batch_norm_training=False): diff --git a/research/object_detection/models/ssd_pnasnet_feature_extractor_test.py b/research/object_detection/models/ssd_pnasnet_feature_extractor_tf1_test.py similarity index 97% rename from research/object_detection/models/ssd_pnasnet_feature_extractor_test.py rename to research/object_detection/models/ssd_pnasnet_feature_extractor_tf1_test.py index 1f2fb0f83..d5f5bff92 100644 --- a/research/object_detection/models/ssd_pnasnet_feature_extractor_test.py +++ b/research/object_detection/models/ssd_pnasnet_feature_extractor_tf1_test.py @@ -14,13 +14,16 @@ # 
============================================================================== """Tests for ssd_pnas_feature_extractor.""" +import unittest import numpy as np import tensorflow.compat.v1 as tf from object_detection.models import ssd_feature_extractor_test from object_detection.models import ssd_pnasnet_feature_extractor +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class SsdPnasNetFeatureExtractorTest( ssd_feature_extractor_test.SsdFeatureExtractorTestBase): diff --git a/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_test.py b/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_test.py deleted file mode 100644 index ddd4b0811..000000000 --- a/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_test.py +++ /dev/null @@ -1,126 +0,0 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Tests for ssd resnet v1 FPN feature extractors.""" -import tensorflow.compat.v1 as tf - -from object_detection.models import ssd_resnet_v1_fpn_feature_extractor -from object_detection.models import ssd_resnet_v1_fpn_feature_extractor_testbase -from object_detection.models import ssd_resnet_v1_fpn_keras_feature_extractor - - -class SSDResnet50V1FeatureExtractorTest( - ssd_resnet_v1_fpn_feature_extractor_testbase. - SSDResnetFPNFeatureExtractorTestBase): - """SSDResnet50v1Fpn feature extractor test.""" - - def _create_feature_extractor(self, depth_multiplier, pad_to_multiple, - use_explicit_padding=False, min_depth=32, - use_keras=False): - is_training = True - if use_keras: - return (ssd_resnet_v1_fpn_keras_feature_extractor. - SSDResNet50V1FpnKerasFeatureExtractor( - is_training=is_training, - depth_multiplier=depth_multiplier, - min_depth=min_depth, - pad_to_multiple=pad_to_multiple, - conv_hyperparams=self._build_conv_hyperparams( - add_batch_norm=False), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - name='ResNet50V1_FPN')) - else: - return ( - ssd_resnet_v1_fpn_feature_extractor.SSDResnet50V1FpnFeatureExtractor( - is_training, depth_multiplier, min_depth, pad_to_multiple, - self.conv_hyperparams_fn, - use_explicit_padding=use_explicit_padding)) - - def _resnet_scope_name(self, use_keras=False): - if use_keras: - return 'ResNet50V1_FPN' - return 'resnet_v1_50' - - -class SSDResnet101V1FeatureExtractorTest( - ssd_resnet_v1_fpn_feature_extractor_testbase. - SSDResnetFPNFeatureExtractorTestBase): - """SSDResnet101v1Fpn feature extractor test.""" - - def _create_feature_extractor(self, depth_multiplier, pad_to_multiple, - use_explicit_padding=False, min_depth=32, - use_keras=False): - is_training = True - if use_keras: - return (ssd_resnet_v1_fpn_keras_feature_extractor. 
- SSDResNet101V1FpnKerasFeatureExtractor( - is_training=is_training, - depth_multiplier=depth_multiplier, - min_depth=min_depth, - pad_to_multiple=pad_to_multiple, - conv_hyperparams=self._build_conv_hyperparams( - add_batch_norm=False), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - name='ResNet101V1_FPN')) - else: - return ( - ssd_resnet_v1_fpn_feature_extractor.SSDResnet101V1FpnFeatureExtractor( - is_training, depth_multiplier, min_depth, pad_to_multiple, - self.conv_hyperparams_fn, - use_explicit_padding=use_explicit_padding)) - - def _resnet_scope_name(self, use_keras): - if use_keras: - return 'ResNet101V1_FPN' - return 'resnet_v1_101' - - -class SSDResnet152V1FeatureExtractorTest( - ssd_resnet_v1_fpn_feature_extractor_testbase. - SSDResnetFPNFeatureExtractorTestBase): - """SSDResnet152v1Fpn feature extractor test.""" - - def _create_feature_extractor(self, depth_multiplier, pad_to_multiple, - use_explicit_padding=False, min_depth=32, - use_keras=False): - is_training = True - if use_keras: - return (ssd_resnet_v1_fpn_keras_feature_extractor. - SSDResNet152V1FpnKerasFeatureExtractor( - is_training=is_training, - depth_multiplier=depth_multiplier, - min_depth=min_depth, - pad_to_multiple=pad_to_multiple, - conv_hyperparams=self._build_conv_hyperparams( - add_batch_norm=False), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - name='ResNet152V1_FPN')) - else: - return ( - ssd_resnet_v1_fpn_feature_extractor.SSDResnet152V1FpnFeatureExtractor( - is_training, depth_multiplier, min_depth, pad_to_multiple, - self.conv_hyperparams_fn, - use_explicit_padding=use_explicit_padding)) - - def _resnet_scope_name(self, use_keras): - if use_keras: - return 'ResNet152V1_FPN' - return 'resnet_v1_152' - - -if __name__ == '__main__': - tf.test.main() diff --git a/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_testbase.py b/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_testbase.py index c3854444d..1ccad530e 100644 --- a/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_testbase.py +++ b/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_testbase.py @@ -19,24 +19,20 @@ from __future__ import division from __future__ import print_function import abc -from absl.testing import parameterized import numpy as np from six.moves import zip import tensorflow.compat.v1 as tf from object_detection.models import ssd_feature_extractor_test +from object_detection.utils import test_utils -@parameterized.parameters( - {'use_keras': False}, - {'use_keras': True}, -) class SSDResnetFPNFeatureExtractorTestBase( ssd_feature_extractor_test.SsdFeatureExtractorTestBase): """Helper test class for SSD Resnet v1 FPN feature extractors.""" @abc.abstractmethod - def _resnet_scope_name(self, use_keras): + def _resnet_scope_name(self): pass @abc.abstractmethod @@ -52,7 +48,7 @@ class SSDResnetFPNFeatureExtractorTestBase( use_keras=False): pass - def test_extract_features_returns_correct_shapes_256(self, use_keras): + def test_extract_features_returns_correct_shapes_256(self): image_height = 256 image_width = 256 depth_multiplier = 1.0 @@ -62,10 +58,10 @@ class SSDResnetFPNFeatureExtractorTestBase( (2, 2, 2, 256)] self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, - expected_feature_map_shape) + expected_feature_map_shape, use_keras=self.is_tf2()) def test_extract_features_returns_correct_shapes_with_dynamic_inputs( - self, use_keras): + self): image_height = 256 
image_width = 256 depth_multiplier = 1.0 @@ -75,10 +71,10 @@ class SSDResnetFPNFeatureExtractorTestBase( (2, 2, 2, 256)] self.check_extract_features_returns_correct_shapes_with_dynamic_inputs( 2, image_height, image_width, depth_multiplier, pad_to_multiple, - expected_feature_map_shape, use_keras=use_keras) + expected_feature_map_shape, use_keras=self.is_tf2()) def test_extract_features_returns_correct_shapes_with_depth_multiplier( - self, use_keras): + self): image_height = 256 image_width = 256 depth_multiplier = 0.5 @@ -91,10 +87,10 @@ class SSDResnetFPNFeatureExtractorTestBase( (2, 2, 2, expected_num_channels)] self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, - expected_feature_map_shape, use_keras=use_keras) + expected_feature_map_shape, use_keras=self.is_tf2()) def test_extract_features_returns_correct_shapes_with_min_depth( - self, use_keras): + self): image_height = 256 image_width = 256 depth_multiplier = 1.0 @@ -106,23 +102,24 @@ class SSDResnetFPNFeatureExtractorTestBase( (2, 4, 4, min_depth), (2, 2, 2, min_depth)] - def graph_fn(image_tensor): + with test_utils.GraphContextOrNone() as g: + image_tensor = tf.random.uniform([2, image_height, image_width, 3]) feature_extractor = self._create_feature_extractor( depth_multiplier, pad_to_multiple, min_depth=min_depth, - use_keras=use_keras) - if use_keras: + use_keras=self.is_tf2()) + + def graph_fn(): + if self.is_tf2(): return feature_extractor(image_tensor) return feature_extractor.extract_features(image_tensor) - image_tensor = np.random.rand(2, image_height, image_width, - 3).astype(np.float32) - feature_maps = self.execute(graph_fn, [image_tensor]) + feature_maps = self.execute(graph_fn, [], graph=g) for feature_map, expected_shape in zip(feature_maps, expected_feature_map_shape): self.assertAllEqual(feature_map.shape, expected_shape) def test_extract_features_returns_correct_shapes_with_pad_to_multiple( - self, use_keras): + self): image_height = 254 image_width = 254 depth_multiplier = 1.0 @@ -133,55 +130,62 @@ class SSDResnetFPNFeatureExtractorTestBase( self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, - expected_feature_map_shape, use_keras=use_keras) + expected_feature_map_shape, use_keras=self.is_tf2()) def test_extract_features_raises_error_with_invalid_image_size( - self, use_keras): + self): image_height = 32 image_width = 32 depth_multiplier = 1.0 pad_to_multiple = 1 self.check_extract_features_raises_error_with_invalid_image_size( image_height, image_width, depth_multiplier, pad_to_multiple, - use_keras=use_keras) + use_keras=self.is_tf2()) - def test_preprocess_returns_correct_value_range(self, use_keras): + def test_preprocess_returns_correct_value_range(self): image_height = 128 image_width = 128 depth_multiplier = 1 pad_to_multiple = 1 - test_image = tf.constant(np.random.rand(4, image_height, image_width, 3)) - feature_extractor = self._create_feature_extractor(depth_multiplier, - pad_to_multiple, - use_keras=use_keras) - preprocessed_image = feature_extractor.preprocess(test_image) - with self.test_session() as sess: - test_image_out, preprocessed_image_out = sess.run( - [test_image, preprocessed_image]) - self.assertAllClose(preprocessed_image_out, - test_image_out - [[123.68, 116.779, 103.939]]) - - def test_variables_only_created_in_scope(self, use_keras): + test_image_np = np.random.rand(4, image_height, image_width, 3) + with test_utils.GraphContextOrNone() as g: + test_image 
= tf.constant(test_image_np) + feature_extractor = self._create_feature_extractor( + depth_multiplier, pad_to_multiple, use_keras=self.is_tf2()) + + def graph_fn(): + preprocessed_image = feature_extractor.preprocess(test_image) + return preprocessed_image + + preprocessed_image_out = self.execute(graph_fn, [], graph=g) + self.assertAllClose(preprocessed_image_out, + test_image_np - [[123.68, 116.779, 103.939]]) + + def test_variables_only_created_in_scope(self): + if self.is_tf2(): + self.skipTest('test_variables_only_created_in_scope is only tf1') depth_multiplier = 1 pad_to_multiple = 1 - scope_name = self._resnet_scope_name(use_keras) + scope_name = self._resnet_scope_name() self.check_feature_extractor_variables_under_scope( depth_multiplier, pad_to_multiple, scope_name, - use_keras=use_keras) + use_keras=self.is_tf2()) - def test_variable_count(self, use_keras): + def test_variable_count(self): + if self.is_tf2(): + self.skipTest('test_variable_count is only tf1') depth_multiplier = 1 pad_to_multiple = 1 variables = self.get_feature_extractor_variables( depth_multiplier, pad_to_multiple, - use_keras=use_keras) + use_keras=self.is_tf2()) # The number of expected variables in resnet_v1_50, resnet_v1_101, # and resnet_v1_152 is 279, 534, and 789 respectively. expected_variables_len = 279 - scope_name = self._resnet_scope_name(use_keras) + scope_name = self._resnet_scope_name() if scope_name in ('ResNet101V1_FPN', 'resnet_v1_101'): expected_variables_len = 534 elif scope_name in ('ResNet152V1_FPN', 'resnet_v1_152'): diff --git a/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_tf1_test.py b/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_tf1_test.py new file mode 100644 index 000000000..58952ff94 --- /dev/null +++ b/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_tf1_test.py @@ -0,0 +1,85 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Tests for ssd resnet v1 FPN feature extractors.""" +import unittest +import tensorflow.compat.v1 as tf + +from object_detection.models import ssd_resnet_v1_fpn_feature_extractor +from object_detection.models import ssd_resnet_v1_fpn_feature_extractor_testbase +from object_detection.utils import tf_version + + +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') +class SSDResnet50V1FeatureExtractorTest( + ssd_resnet_v1_fpn_feature_extractor_testbase. 
+ SSDResnetFPNFeatureExtractorTestBase): + """SSDResnet50v1Fpn feature extractor test.""" + + def _create_feature_extractor(self, depth_multiplier, pad_to_multiple, + use_explicit_padding=False, min_depth=32, + use_keras=False): + is_training = True + return ( + ssd_resnet_v1_fpn_feature_extractor.SSDResnet50V1FpnFeatureExtractor( + is_training, depth_multiplier, min_depth, pad_to_multiple, + self.conv_hyperparams_fn, + use_explicit_padding=use_explicit_padding)) + + def _resnet_scope_name(self): + return 'resnet_v1_50' + + +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') +class SSDResnet101V1FeatureExtractorTest( + ssd_resnet_v1_fpn_feature_extractor_testbase. + SSDResnetFPNFeatureExtractorTestBase): + """SSDResnet101v1Fpn feature extractor test.""" + + def _create_feature_extractor(self, depth_multiplier, pad_to_multiple, + use_explicit_padding=False, min_depth=32, + use_keras=False): + is_training = True + return ( + ssd_resnet_v1_fpn_feature_extractor.SSDResnet101V1FpnFeatureExtractor( + is_training, depth_multiplier, min_depth, pad_to_multiple, + self.conv_hyperparams_fn, + use_explicit_padding=use_explicit_padding)) + + def _resnet_scope_name(self): + return 'resnet_v1_101' + + +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') +class SSDResnet152V1FeatureExtractorTest( + ssd_resnet_v1_fpn_feature_extractor_testbase. + SSDResnetFPNFeatureExtractorTestBase): + """SSDResnet152v1Fpn feature extractor test.""" + + def _create_feature_extractor(self, depth_multiplier, pad_to_multiple, + use_explicit_padding=False, min_depth=32, + use_keras=False): + is_training = True + return ( + ssd_resnet_v1_fpn_feature_extractor.SSDResnet152V1FpnFeatureExtractor( + is_training, depth_multiplier, min_depth, pad_to_multiple, + self.conv_hyperparams_fn, + use_explicit_padding=use_explicit_padding)) + + def _resnet_scope_name(self): + return 'resnet_v1_152' + + +if __name__ == '__main__': + tf.test.main() diff --git a/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_tf2_test.py b/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_tf2_test.py new file mode 100644 index 000000000..27c54ddd0 --- /dev/null +++ b/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_tf2_test.py @@ -0,0 +1,103 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Tests for ssd resnet v1 FPN feature extractors.""" +import unittest +import tensorflow.compat.v1 as tf + +from object_detection.models import ssd_resnet_v1_fpn_feature_extractor_testbase +from object_detection.models import ssd_resnet_v1_fpn_keras_feature_extractor +from object_detection.utils import tf_version + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class SSDResnet50V1FeatureExtractorTest( + ssd_resnet_v1_fpn_feature_extractor_testbase. 
+ SSDResnetFPNFeatureExtractorTestBase): + """SSDResnet50v1Fpn feature extractor test.""" + + def _create_feature_extractor(self, depth_multiplier, pad_to_multiple, + use_explicit_padding=False, min_depth=32, + use_keras=True): + is_training = True + return (ssd_resnet_v1_fpn_keras_feature_extractor. + SSDResNet50V1FpnKerasFeatureExtractor( + is_training=is_training, + depth_multiplier=depth_multiplier, + min_depth=min_depth, + pad_to_multiple=pad_to_multiple, + conv_hyperparams=self._build_conv_hyperparams( + add_batch_norm=False), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + name='ResNet50V1_FPN')) + + def _resnet_scope_name(self): + return 'ResNet50V1_FPN' + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class SSDResnet101V1FeatureExtractorTest( + ssd_resnet_v1_fpn_feature_extractor_testbase. + SSDResnetFPNFeatureExtractorTestBase): + """SSDResnet101v1Fpn feature extractor test.""" + + def _create_feature_extractor(self, depth_multiplier, pad_to_multiple, + use_explicit_padding=False, min_depth=32, + use_keras=False): + is_training = True + return (ssd_resnet_v1_fpn_keras_feature_extractor. + SSDResNet101V1FpnKerasFeatureExtractor( + is_training=is_training, + depth_multiplier=depth_multiplier, + min_depth=min_depth, + pad_to_multiple=pad_to_multiple, + conv_hyperparams=self._build_conv_hyperparams( + add_batch_norm=False), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + name='ResNet101V1_FPN')) + + def _resnet_scope_name(self): + return 'ResNet101V1_FPN' + + +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class SSDResnet152V1FeatureExtractorTest( + ssd_resnet_v1_fpn_feature_extractor_testbase. + SSDResnetFPNFeatureExtractorTestBase): + """SSDResnet152v1Fpn feature extractor test.""" + + def _create_feature_extractor(self, depth_multiplier, pad_to_multiple, + use_explicit_padding=False, min_depth=32, + use_keras=False): + is_training = True + return (ssd_resnet_v1_fpn_keras_feature_extractor. + SSDResNet152V1FpnKerasFeatureExtractor( + is_training=is_training, + depth_multiplier=depth_multiplier, + min_depth=min_depth, + pad_to_multiple=pad_to_multiple, + conv_hyperparams=self._build_conv_hyperparams( + add_batch_norm=False), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + name='ResNet152V1_FPN')) + + def _resnet_scope_name(self): + return 'ResNet152V1_FPN' + + +if __name__ == '__main__': + tf.test.main() diff --git a/research/object_detection/models/ssd_resnet_v1_ppn_feature_extractor_test.py b/research/object_detection/models/ssd_resnet_v1_ppn_feature_extractor_tf1_test.py similarity index 92% rename from research/object_detection/models/ssd_resnet_v1_ppn_feature_extractor_test.py rename to research/object_detection/models/ssd_resnet_v1_ppn_feature_extractor_tf1_test.py index bfcb74cf9..bb95cb53f 100644 --- a/research/object_detection/models/ssd_resnet_v1_ppn_feature_extractor_test.py +++ b/research/object_detection/models/ssd_resnet_v1_ppn_feature_extractor_tf1_test.py @@ -13,12 +13,15 @@ # limitations under the License. 
# ============================================================================== """Tests for ssd resnet v1 feature extractors.""" +import unittest import tensorflow.compat.v1 as tf from object_detection.models import ssd_resnet_v1_ppn_feature_extractor from object_detection.models import ssd_resnet_v1_ppn_feature_extractor_testbase +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class SSDResnet50V1PpnFeatureExtractorTest( ssd_resnet_v1_ppn_feature_extractor_testbase. SSDResnetPpnFeatureExtractorTestBase): @@ -40,6 +43,7 @@ class SSDResnet50V1PpnFeatureExtractorTest( return 'resnet_v1_50' +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class SSDResnet101V1PpnFeatureExtractorTest( ssd_resnet_v1_ppn_feature_extractor_testbase. SSDResnetPpnFeatureExtractorTestBase): @@ -62,6 +66,7 @@ class SSDResnet101V1PpnFeatureExtractorTest( return 'resnet_v1_101' +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class SSDResnet152V1PpnFeatureExtractorTest( ssd_resnet_v1_ppn_feature_extractor_testbase. SSDResnetPpnFeatureExtractorTestBase): diff --git a/research/object_detection/predictors/convolutional_box_predictor_test.py b/research/object_detection/predictors/convolutional_box_predictor_tf1_test.py similarity index 99% rename from research/object_detection/predictors/convolutional_box_predictor_test.py rename to research/object_detection/predictors/convolutional_box_predictor_tf1_test.py index eb608e1e7..3236615df 100644 --- a/research/object_detection/predictors/convolutional_box_predictor_test.py +++ b/research/object_detection/predictors/convolutional_box_predictor_tf1_test.py @@ -19,7 +19,7 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function - +import unittest from absl.testing import parameterized import numpy as np from six.moves import range @@ -35,8 +35,10 @@ from object_detection.predictors.heads import class_head from object_detection.predictors.heads import mask_head from object_detection.protos import hyperparams_pb2 from object_detection.utils import test_case +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class ConvolutionalBoxPredictorTest(test_case.TestCase): def _build_arg_scope_with_conv_hyperparams(self): @@ -281,6 +283,7 @@ class ConvolutionalBoxPredictorTest(test_case.TestCase): self.assertEqual(bad_dangling_ops, []) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class WeightSharedConvolutionalBoxPredictorTest(test_case.TestCase): def _build_arg_scope_with_conv_hyperparams(self): diff --git a/research/object_detection/predictors/convolutional_keras_box_predictor_test.py b/research/object_detection/predictors/convolutional_keras_box_predictor_tf2_test.py similarity index 64% rename from research/object_detection/predictors/convolutional_keras_box_predictor_test.py rename to research/object_detection/predictors/convolutional_keras_box_predictor_tf2_test.py index 5db7e962f..180a6e946 100644 --- a/research/object_detection/predictors/convolutional_keras_box_predictor_test.py +++ b/research/object_detection/predictors/convolutional_keras_box_predictor_tf2_test.py @@ -14,6 +14,7 @@ # ============================================================================== """Tests for object_detection.predictors.convolutional_keras_box_predictor.""" +import unittest import numpy as np import tensorflow.compat.v1 as tf @@ -26,8 +27,10 @@ from 
object_detection.predictors.heads import keras_class_head from object_detection.predictors.heads import keras_mask_head from object_detection.protos import hyperparams_pb2 from object_detection.utils import test_case +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class ConvolutionalKerasBoxPredictorTest(test_case.TestCase): def _build_conv_hyperparams(self): @@ -47,23 +50,23 @@ class ConvolutionalKerasBoxPredictorTest(test_case.TestCase): return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams) def test_get_boxes_for_five_aspect_ratios_per_location(self): + conv_box_predictor = ( + box_predictor_builder.build_convolutional_keras_box_predictor( + is_training=False, + num_classes=0, + conv_hyperparams=self._build_conv_hyperparams(), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + num_predictions_per_location_list=[5], + min_depth=0, + max_depth=32, + num_layers_before_predictor=1, + use_dropout=True, + dropout_keep_prob=0.8, + kernel_size=1, + box_code_size=4 + )) def graph_fn(image_features): - conv_box_predictor = ( - box_predictor_builder.build_convolutional_keras_box_predictor( - is_training=False, - num_classes=0, - conv_hyperparams=self._build_conv_hyperparams(), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - num_predictions_per_location_list=[5], - min_depth=0, - max_depth=32, - num_layers_before_predictor=1, - use_dropout=True, - dropout_keep_prob=0.8, - kernel_size=1, - box_code_size=4 - )) box_predictions = conv_box_predictor([image_features]) box_encodings = tf.concat( box_predictions[box_predictor.BOX_ENCODINGS], axis=1) @@ -78,23 +81,23 @@ class ConvolutionalKerasBoxPredictorTest(test_case.TestCase): self.assertAllEqual(objectness_predictions.shape, [4, 320, 1]) def test_get_boxes_for_one_aspect_ratio_per_location(self): + conv_box_predictor = ( + box_predictor_builder.build_convolutional_keras_box_predictor( + is_training=False, + num_classes=0, + conv_hyperparams=self._build_conv_hyperparams(), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + num_predictions_per_location_list=[1], + min_depth=0, + max_depth=32, + num_layers_before_predictor=1, + use_dropout=True, + dropout_keep_prob=0.8, + kernel_size=1, + box_code_size=4 + )) def graph_fn(image_features): - conv_box_predictor = ( - box_predictor_builder.build_convolutional_keras_box_predictor( - is_training=False, - num_classes=0, - conv_hyperparams=self._build_conv_hyperparams(), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - num_predictions_per_location_list=[1], - min_depth=0, - max_depth=32, - num_layers_before_predictor=1, - use_dropout=True, - dropout_keep_prob=0.8, - kernel_size=1, - box_code_size=4 - )) box_predictions = conv_box_predictor([image_features]) box_encodings = tf.concat( box_predictions[box_predictor.BOX_ENCODINGS], axis=1) @@ -111,23 +114,23 @@ class ConvolutionalKerasBoxPredictorTest(test_case.TestCase): self): num_classes_without_background = 6 image_features = np.random.rand(4, 8, 8, 64).astype(np.float32) + conv_box_predictor = ( + box_predictor_builder.build_convolutional_keras_box_predictor( + is_training=False, + num_classes=num_classes_without_background, + conv_hyperparams=self._build_conv_hyperparams(), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + num_predictions_per_location_list=[5], + min_depth=0, + max_depth=32, + num_layers_before_predictor=1, + use_dropout=True, + dropout_keep_prob=0.8, + kernel_size=1, + box_code_size=4 + )) def 
graph_fn(image_features): - conv_box_predictor = ( - box_predictor_builder.build_convolutional_keras_box_predictor( - is_training=False, - num_classes=num_classes_without_background, - conv_hyperparams=self._build_conv_hyperparams(), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - num_predictions_per_location_list=[5], - min_depth=0, - max_depth=32, - num_layers_before_predictor=1, - use_dropout=True, - dropout_keep_prob=0.8, - kernel_size=1, - box_code_size=4 - )) box_predictions = conv_box_predictor([image_features]) box_encodings = tf.concat( box_predictions[box_predictor.BOX_ENCODINGS], axis=1) @@ -144,7 +147,7 @@ class ConvolutionalKerasBoxPredictorTest(test_case.TestCase): def test_get_predictions_with_feature_maps_of_dynamic_shape( self): - image_features = tf.placeholder(dtype=tf.float32, shape=[4, None, None, 64]) + tf.keras.backend.clear_session() conv_box_predictor = ( box_predictor_builder.build_convolutional_keras_box_predictor( is_training=False, @@ -161,28 +164,25 @@ class ConvolutionalKerasBoxPredictorTest(test_case.TestCase): kernel_size=1, box_code_size=4 )) - box_predictions = conv_box_predictor([image_features]) - box_encodings = tf.concat( - box_predictions[box_predictor.BOX_ENCODINGS], axis=1) - objectness_predictions = tf.concat( - box_predictions[box_predictor.CLASS_PREDICTIONS_WITH_BACKGROUND], - axis=1) - init_op = tf.global_variables_initializer() - + variables = [] + def graph_fn(image_features): + box_predictions = conv_box_predictor([image_features]) + variables.extend(list(conv_box_predictor.variables)) + box_encodings = tf.concat( + box_predictions[box_predictor.BOX_ENCODINGS], axis=1) + objectness_predictions = tf.concat( + box_predictions[box_predictor.CLASS_PREDICTIONS_WITH_BACKGROUND], + axis=1) + return box_encodings, objectness_predictions resolution = 32 expected_num_anchors = resolution*resolution*5 - with self.test_session() as sess: - sess.run(init_op) - (box_encodings_shape, - objectness_predictions_shape) = sess.run( - [tf.shape(box_encodings), tf.shape(objectness_predictions)], - feed_dict={image_features: - np.random.rand(4, resolution, resolution, 64)}) - actual_variable_set = set( - [var.op.name for var in tf.trainable_variables()]) - self.assertAllEqual(box_encodings_shape, [4, expected_num_anchors, 1, 4]) - self.assertAllEqual(objectness_predictions_shape, - [4, expected_num_anchors, 1]) + box_encodings, objectness_predictions = self.execute( + graph_fn, [np.random.rand(4, resolution, resolution, 64)]) + + actual_variable_set = set([var.name.split(':')[0] for var in variables]) + self.assertAllEqual(box_encodings.shape, [4, expected_num_anchors, 1, 4]) + self.assertAllEqual(objectness_predictions.shape, + [4, expected_num_anchors, 1]) expected_variable_set = set([ 'BoxPredictor/SharedConvolutions_0/Conv2d_0_1x1_32/bias', 'BoxPredictor/SharedConvolutions_0/Conv2d_0_1x1_32/kernel', @@ -195,7 +195,7 @@ class ConvolutionalKerasBoxPredictorTest(test_case.TestCase): ['box_encodings', 'class_predictions_with_background']) def test_use_depthwise_convolution(self): - image_features = tf.placeholder(dtype=tf.float32, shape=[4, None, None, 64]) + tf.keras.backend.clear_session() conv_box_predictor = ( box_predictor_builder.build_convolutional_keras_box_predictor( is_training=False, @@ -213,27 +213,25 @@ class ConvolutionalKerasBoxPredictorTest(test_case.TestCase): box_code_size=4, use_depthwise=True )) - box_predictions = conv_box_predictor([image_features]) - box_encodings = tf.concat( - box_predictions[box_predictor.BOX_ENCODINGS], 
axis=1) - objectness_predictions = tf.concat( - box_predictions[box_predictor.CLASS_PREDICTIONS_WITH_BACKGROUND], - axis=1) - init_op = tf.global_variables_initializer() + variables = [] + def graph_fn(image_features): + box_predictions = conv_box_predictor([image_features]) + variables.extend(list(conv_box_predictor.variables)) + box_encodings = tf.concat( + box_predictions[box_predictor.BOX_ENCODINGS], axis=1) + objectness_predictions = tf.concat( + box_predictions[box_predictor.CLASS_PREDICTIONS_WITH_BACKGROUND], + axis=1) + return box_encodings, objectness_predictions resolution = 32 expected_num_anchors = resolution*resolution*5 - with self.test_session() as sess: - sess.run(init_op) - (box_encodings_shape, - objectness_predictions_shape) = sess.run( - [tf.shape(box_encodings), tf.shape(objectness_predictions)], - feed_dict={image_features: - np.random.rand(4, resolution, resolution, 64)}) - actual_variable_set = set( - [var.op.name for var in tf.trainable_variables()]) - self.assertAllEqual(box_encodings_shape, [4, expected_num_anchors, 1, 4]) - self.assertAllEqual(objectness_predictions_shape, + box_encodings, objectness_predictions = self.execute( + graph_fn, [np.random.rand(4, resolution, resolution, 64)]) + + actual_variable_set = set([var.name.split(':')[0] for var in variables]) + self.assertAllEqual(box_encodings.shape, [4, expected_num_anchors, 1, 4]) + self.assertAllEqual(objectness_predictions.shape, [4, expected_num_anchors, 1]) expected_variable_set = set([ 'BoxPredictor/SharedConvolutions_0/Conv2d_0_1x1_32/bias', @@ -259,6 +257,7 @@ class ConvolutionalKerasBoxPredictorTest(test_case.TestCase): ['box_encodings', 'class_predictions_with_background']) +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): def _build_conv_hyperparams(self, add_batch_norm=True): @@ -288,19 +287,20 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): # pylint: disable=line-too-long def test_get_boxes_for_five_aspect_ratios_per_location(self): + conv_box_predictor = ( + box_predictor_builder + .build_weight_shared_convolutional_keras_box_predictor( + is_training=False, + num_classes=0, + conv_hyperparams=self._build_conv_hyperparams(), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + num_predictions_per_location_list=[5], + depth=32, + num_layers_before_predictor=1, + box_code_size=4)) def graph_fn(image_features): - conv_box_predictor = ( - box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor( - is_training=False, - num_classes=0, - conv_hyperparams=self._build_conv_hyperparams(), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - num_predictions_per_location_list=[5], - depth=32, - num_layers_before_predictor=1, - box_code_size=4)) box_predictions = conv_box_predictor([image_features]) box_encodings = tf.concat( box_predictions[box_predictor.BOX_ENCODINGS], axis=1) @@ -314,20 +314,21 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): self.assertAllEqual(objectness_predictions.shape, [4, 320, 1]) def test_bias_predictions_to_background_with_sigmoid_score_conversion(self): + conv_box_predictor = ( + box_predictor_builder + .build_weight_shared_convolutional_keras_box_predictor( + is_training=True, + num_classes=2, + conv_hyperparams=self._build_conv_hyperparams(), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + num_predictions_per_location_list=[5], + depth=32, + num_layers_before_predictor=1, + 
class_prediction_bias_init=-4.6, + box_code_size=4)) def graph_fn(image_features): - conv_box_predictor = ( - box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor( - is_training=True, - num_classes=2, - conv_hyperparams=self._build_conv_hyperparams(), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - num_predictions_per_location_list=[5], - depth=32, - num_layers_before_predictor=1, - class_prediction_bias_init=-4.6, - box_code_size=4)) box_predictions = conv_box_predictor([image_features]) class_predictions = tf.concat(box_predictions[ box_predictor.CLASS_PREDICTIONS_WITH_BACKGROUND], axis=1) @@ -339,20 +340,21 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): def test_get_multi_class_predictions_for_five_aspect_ratios_per_location( self): - num_classes_without_background = 6 + conv_box_predictor = ( + box_predictor_builder + .build_weight_shared_convolutional_keras_box_predictor( + is_training=False, + num_classes=num_classes_without_background, + conv_hyperparams=self._build_conv_hyperparams(), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + num_predictions_per_location_list=[5], + depth=32, + num_layers_before_predictor=1, + box_code_size=4)) + def graph_fn(image_features): - conv_box_predictor = ( - box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor( - is_training=False, - num_classes=num_classes_without_background, - conv_hyperparams=self._build_conv_hyperparams(), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - num_predictions_per_location_list=[5], - depth=32, - num_layers_before_predictor=1, - box_code_size=4)) box_predictions = conv_box_predictor([image_features]) box_encodings = tf.concat( box_predictions[box_predictor.BOX_ENCODINGS], axis=1) @@ -369,20 +371,21 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): def test_get_multi_class_predictions_from_two_feature_maps( self): - num_classes_without_background = 6 + conv_box_predictor = ( + box_predictor_builder + .build_weight_shared_convolutional_keras_box_predictor( + is_training=False, + num_classes=num_classes_without_background, + conv_hyperparams=self._build_conv_hyperparams(), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + num_predictions_per_location_list=[5, 5], + depth=32, + num_layers_before_predictor=1, + box_code_size=4)) + def graph_fn(image_features1, image_features2): - conv_box_predictor = ( - box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor( - is_training=False, - num_classes=num_classes_without_background, - conv_hyperparams=self._build_conv_hyperparams(), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - num_predictions_per_location_list=[5, 5], - depth=32, - num_layers_before_predictor=1, - box_code_size=4)) box_predictions = conv_box_predictor([image_features1, image_features2]) box_encodings = tf.concat( box_predictions[box_predictor.BOX_ENCODINGS], axis=1) @@ -401,20 +404,21 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): def test_get_multi_class_predictions_from_feature_maps_of_different_depth( self): - num_classes_without_background = 6 + conv_box_predictor = ( + box_predictor_builder + .build_weight_shared_convolutional_keras_box_predictor( + is_training=False, + num_classes=num_classes_without_background, + conv_hyperparams=self._build_conv_hyperparams(), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + num_predictions_per_location_list=[5, 5, 5], + depth=32, + 
num_layers_before_predictor=1, + box_code_size=4)) + def graph_fn(image_features1, image_features2, image_features3): - conv_box_predictor = ( - box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor( - is_training=False, - num_classes=num_classes_without_background, - conv_hyperparams=self._build_conv_hyperparams(), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - num_predictions_per_location_list=[5, 5, 5], - depth=32, - num_layers_before_predictor=1, - box_code_size=4)) box_predictions = conv_box_predictor( [image_features1, image_features2, image_features3]) box_encodings = tf.concat( @@ -435,20 +439,25 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): def test_predictions_multiple_feature_maps_share_weights_separate_batchnorm( self): + tf.keras.backend.clear_session() num_classes_without_background = 6 + conv_box_predictor = ( + box_predictor_builder + .build_weight_shared_convolutional_keras_box_predictor( + is_training=False, + num_classes=num_classes_without_background, + conv_hyperparams=self._build_conv_hyperparams(), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + num_predictions_per_location_list=[5, 5], + depth=32, + num_layers_before_predictor=2, + box_code_size=4)) + variables = [] + def graph_fn(image_features1, image_features2): - conv_box_predictor = ( - box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor( - is_training=False, - num_classes=num_classes_without_background, - conv_hyperparams=self._build_conv_hyperparams(), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - num_predictions_per_location_list=[5, 5], - depth=32, - num_layers_before_predictor=2, - box_code_size=4)) box_predictions = conv_box_predictor([image_features1, image_features2]) + variables.extend(list(conv_box_predictor.variables)) box_encodings = tf.concat( box_predictions[box_predictor.BOX_ENCODINGS], axis=1) class_predictions_with_background = tf.concat( @@ -456,25 +465,41 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): axis=1) return (box_encodings, class_predictions_with_background) - with self.test_session(graph=tf.Graph()): - graph_fn(tf.random_uniform([4, 32, 32, 3], dtype=tf.float32), - tf.random_uniform([4, 16, 16, 3], dtype=tf.float32)) - actual_variable_set = set( - [var.op.name for var in tf.trainable_variables()]) + self.execute(graph_fn, [ + np.random.rand(4, 32, 32, 3).astype(np.float32), + np.random.rand(4, 16, 16, 3).astype(np.float32) + ]) + actual_variable_set = set([var.name.split(':')[0] for var in variables]) expected_variable_set = set([ # Box prediction tower ('WeightSharedConvolutionalBoxPredictor/' 'BoxPredictionTower/conv2d_0/kernel'), ('WeightSharedConvolutionalBoxPredictor/' 'BoxPredictionTower/conv2d_0/BatchNorm/feature_0/beta'), + ('WeightSharedConvolutionalBoxPredictor/' + 'BoxPredictionTower/conv2d_0/BatchNorm/feature_0/moving_mean'), + ('WeightSharedConvolutionalBoxPredictor/' + 'BoxPredictionTower/conv2d_0/BatchNorm/feature_0/moving_variance'), ('WeightSharedConvolutionalBoxPredictor/' 'BoxPredictionTower/conv2d_0/BatchNorm/feature_1/beta'), + ('WeightSharedConvolutionalBoxPredictor/' + 'BoxPredictionTower/conv2d_0/BatchNorm/feature_1/moving_mean'), + ('WeightSharedConvolutionalBoxPredictor/' + 'BoxPredictionTower/conv2d_0/BatchNorm/feature_1/moving_variance'), ('WeightSharedConvolutionalBoxPredictor/' 'BoxPredictionTower/conv2d_1/kernel'), ('WeightSharedConvolutionalBoxPredictor/' 
'BoxPredictionTower/conv2d_1/BatchNorm/feature_0/beta'), + ('WeightSharedConvolutionalBoxPredictor/' + 'BoxPredictionTower/conv2d_1/BatchNorm/feature_0/moving_mean'), + ('WeightSharedConvolutionalBoxPredictor/' + 'BoxPredictionTower/conv2d_1/BatchNorm/feature_0/moving_variance'), ('WeightSharedConvolutionalBoxPredictor/' 'BoxPredictionTower/conv2d_1/BatchNorm/feature_1/beta'), + ('WeightSharedConvolutionalBoxPredictor/' + 'BoxPredictionTower/conv2d_1/BatchNorm/feature_1/moving_mean'), + ('WeightSharedConvolutionalBoxPredictor/' + 'BoxPredictionTower/conv2d_1/BatchNorm/feature_1/moving_variance'), # Box prediction head ('WeightSharedConvolutionalBoxPredictor/' 'WeightSharedConvolutionalBoxHead/BoxPredictor/kernel'), @@ -485,14 +510,30 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): 'ClassPredictionTower/conv2d_0/kernel'), ('WeightSharedConvolutionalBoxPredictor/' 'ClassPredictionTower/conv2d_0/BatchNorm/feature_0/beta'), + ('WeightSharedConvolutionalBoxPredictor/' + 'ClassPredictionTower/conv2d_0/BatchNorm/feature_0/moving_mean'), + ('WeightSharedConvolutionalBoxPredictor/' + 'ClassPredictionTower/conv2d_0/BatchNorm/feature_0/moving_variance'), ('WeightSharedConvolutionalBoxPredictor/' 'ClassPredictionTower/conv2d_0/BatchNorm/feature_1/beta'), + ('WeightSharedConvolutionalBoxPredictor/' + 'ClassPredictionTower/conv2d_0/BatchNorm/feature_1/moving_mean'), + ('WeightSharedConvolutionalBoxPredictor/' + 'ClassPredictionTower/conv2d_0/BatchNorm/feature_1/moving_variance'), ('WeightSharedConvolutionalBoxPredictor/' 'ClassPredictionTower/conv2d_1/kernel'), ('WeightSharedConvolutionalBoxPredictor/' 'ClassPredictionTower/conv2d_1/BatchNorm/feature_0/beta'), + ('WeightSharedConvolutionalBoxPredictor/' + 'ClassPredictionTower/conv2d_1/BatchNorm/feature_0/moving_mean'), + ('WeightSharedConvolutionalBoxPredictor/' + 'ClassPredictionTower/conv2d_1/BatchNorm/feature_0/moving_variance'), ('WeightSharedConvolutionalBoxPredictor/' 'ClassPredictionTower/conv2d_1/BatchNorm/feature_1/beta'), + ('WeightSharedConvolutionalBoxPredictor/' + 'ClassPredictionTower/conv2d_1/BatchNorm/feature_1/moving_mean'), + ('WeightSharedConvolutionalBoxPredictor/' + 'ClassPredictionTower/conv2d_1/BatchNorm/feature_1/moving_variance'), # Class prediction head ('WeightSharedConvolutionalBoxPredictor/' 'WeightSharedConvolutionalClassHead/ClassPredictor/kernel'), @@ -502,21 +543,26 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): def test_predictions_multiple_feature_maps_share_weights_without_batchnorm( self): + tf.keras.backend.clear_session() num_classes_without_background = 6 + conv_box_predictor = ( + box_predictor_builder + .build_weight_shared_convolutional_keras_box_predictor( + is_training=False, + num_classes=num_classes_without_background, + conv_hyperparams=self._build_conv_hyperparams(), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + num_predictions_per_location_list=[5, 5], + depth=32, + num_layers_before_predictor=2, + box_code_size=4, + apply_batch_norm=False)) + variables = [] + def graph_fn(image_features1, image_features2): - conv_box_predictor = ( - box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor( - is_training=False, - num_classes=num_classes_without_background, - conv_hyperparams=self._build_conv_hyperparams(), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - num_predictions_per_location_list=[5, 5], - depth=32, - num_layers_before_predictor=2, - box_code_size=4, - apply_batch_norm=False)) 
box_predictions = conv_box_predictor([image_features1, image_features2]) + variables.extend(list(conv_box_predictor.variables)) box_encodings = tf.concat( box_predictions[box_predictor.BOX_ENCODINGS], axis=1) class_predictions_with_background = tf.concat( @@ -524,11 +570,11 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): axis=1) return (box_encodings, class_predictions_with_background) - with self.test_session(graph=tf.Graph()): - graph_fn(tf.random_uniform([4, 32, 32, 3], dtype=tf.float32), - tf.random_uniform([4, 16, 16, 3], dtype=tf.float32)) - actual_variable_set = set( - [var.op.name for var in tf.trainable_variables()]) + self.execute(graph_fn, [ + np.random.rand(4, 32, 32, 3).astype(np.float32), + np.random.rand(4, 16, 16, 3).astype(np.float32) + ]) + actual_variable_set = set([var.name.split(':')[0] for var in variables]) expected_variable_set = set([ # Box prediction tower ('WeightSharedConvolutionalBoxPredictor/' @@ -562,23 +608,27 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): def test_predictions_multiple_feature_maps_share_weights_with_depthwise( self): + tf.keras.backend.clear_session() num_classes_without_background = 6 + conv_box_predictor = ( + box_predictor_builder + .build_weight_shared_convolutional_keras_box_predictor( + is_training=False, + num_classes=num_classes_without_background, + conv_hyperparams=self._build_conv_hyperparams(add_batch_norm=False), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + num_predictions_per_location_list=[5, 5], + depth=32, + num_layers_before_predictor=2, + box_code_size=4, + apply_batch_norm=False, + use_depthwise=True)) + variables = [] + def graph_fn(image_features1, image_features2): - conv_box_predictor = ( - box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor( - is_training=False, - num_classes=num_classes_without_background, - conv_hyperparams=self._build_conv_hyperparams( - add_batch_norm=False), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - num_predictions_per_location_list=[5, 5], - depth=32, - num_layers_before_predictor=2, - box_code_size=4, - apply_batch_norm=False, - use_depthwise=True)) box_predictions = conv_box_predictor([image_features1, image_features2]) + variables.extend(list(conv_box_predictor.variables)) box_encodings = tf.concat( box_predictions[box_predictor.BOX_ENCODINGS], axis=1) class_predictions_with_background = tf.concat( @@ -586,11 +636,11 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): axis=1) return (box_encodings, class_predictions_with_background) - with self.test_session(graph=tf.Graph()): - graph_fn(tf.random_uniform([4, 32, 32, 3], dtype=tf.float32), - tf.random_uniform([4, 16, 16, 3], dtype=tf.float32)) - actual_variable_set = set( - [var.op.name for var in tf.trainable_variables()]) + self.execute(graph_fn, [ + np.random.rand(4, 32, 32, 3).astype(np.float32), + np.random.rand(4, 16, 16, 3).astype(np.float32) + ]) + actual_variable_set = set([var.name.split(':')[0] for var in variables]) expected_variable_set = set([ # Box prediction tower ('WeightSharedConvolutionalBoxPredictor/' @@ -635,23 +685,27 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): self.assertEqual(expected_variable_set, actual_variable_set) def test_no_batchnorm_params_when_batchnorm_is_not_configured(self): + tf.keras.backend.clear_session() num_classes_without_background = 6 + conv_box_predictor = ( + box_predictor_builder + 
.build_weight_shared_convolutional_keras_box_predictor( + is_training=False, + num_classes=num_classes_without_background, + conv_hyperparams=self._build_conv_hyperparams(add_batch_norm=False), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + num_predictions_per_location_list=[5, 5], + depth=32, + num_layers_before_predictor=2, + box_code_size=4, + apply_batch_norm=False)) + variables = [] + def graph_fn(image_features1, image_features2): - conv_box_predictor = ( - box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor( - is_training=False, - num_classes=num_classes_without_background, - conv_hyperparams=self._build_conv_hyperparams( - add_batch_norm=False), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - num_predictions_per_location_list=[5, 5], - depth=32, - num_layers_before_predictor=2, - box_code_size=4, - apply_batch_norm=False)) box_predictions = conv_box_predictor( [image_features1, image_features2]) + variables.extend(list(conv_box_predictor.variables)) box_encodings = tf.concat( box_predictions[box_predictor.BOX_ENCODINGS], axis=1) class_predictions_with_background = tf.concat( @@ -659,11 +713,11 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): axis=1) return (box_encodings, class_predictions_with_background) - with self.test_session(graph=tf.Graph()): - graph_fn(tf.random_uniform([4, 32, 32, 3], dtype=tf.float32), - tf.random_uniform([4, 16, 16, 3], dtype=tf.float32)) - actual_variable_set = set( - [var.op.name for var in tf.trainable_variables()]) + self.execute(graph_fn, [ + np.random.rand(4, 32, 32, 3).astype(np.float32), + np.random.rand(4, 16, 16, 3).astype(np.float32) + ]) + actual_variable_set = set([var.name.split(':')[0] for var in variables]) expected_variable_set = set([ # Box prediction tower ('WeightSharedConvolutionalBoxPredictor/' @@ -697,22 +751,27 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): def test_predictions_share_weights_share_tower_separate_batchnorm( self): + tf.keras.backend.clear_session() num_classes_without_background = 6 + conv_box_predictor = ( + box_predictor_builder + .build_weight_shared_convolutional_keras_box_predictor( + is_training=False, + num_classes=num_classes_without_background, + conv_hyperparams=self._build_conv_hyperparams(), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + num_predictions_per_location_list=[5, 5], + depth=32, + num_layers_before_predictor=2, + box_code_size=4, + share_prediction_tower=True)) + variables = [] + def graph_fn(image_features1, image_features2): - conv_box_predictor = ( - box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor( - is_training=False, - num_classes=num_classes_without_background, - conv_hyperparams=self._build_conv_hyperparams(), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - num_predictions_per_location_list=[5, 5], - depth=32, - num_layers_before_predictor=2, - box_code_size=4, - share_prediction_tower=True)) box_predictions = conv_box_predictor( [image_features1, image_features2]) + variables.extend(list(conv_box_predictor.variables)) box_encodings = tf.concat( box_predictions[box_predictor.BOX_ENCODINGS], axis=1) class_predictions_with_background = tf.concat( @@ -720,11 +779,11 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): axis=1) return (box_encodings, class_predictions_with_background) - with self.test_session(graph=tf.Graph()): - graph_fn(tf.random_uniform([4, 32, 32, 3], dtype=tf.float32), - 
tf.random_uniform([4, 16, 16, 3], dtype=tf.float32)) - actual_variable_set = set( - [var.op.name for var in tf.trainable_variables()]) + self.execute(graph_fn, [ + np.random.rand(4, 32, 32, 3).astype(np.float32), + np.random.rand(4, 16, 16, 3).astype(np.float32) + ]) + actual_variable_set = set([var.name.split(':')[0] for var in variables]) expected_variable_set = set([ # Shared prediction tower ('WeightSharedConvolutionalBoxPredictor/' @@ -733,12 +792,28 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): 'PredictionTower/conv2d_0/BatchNorm/feature_0/beta'), ('WeightSharedConvolutionalBoxPredictor/' 'PredictionTower/conv2d_0/BatchNorm/feature_1/beta'), + ('WeightSharedConvolutionalBoxPredictor/' + 'PredictionTower/conv2d_0/BatchNorm/feature_0/moving_mean'), + ('WeightSharedConvolutionalBoxPredictor/' + 'PredictionTower/conv2d_0/BatchNorm/feature_1/moving_mean'), + ('WeightSharedConvolutionalBoxPredictor/' + 'PredictionTower/conv2d_0/BatchNorm/feature_0/moving_variance'), + ('WeightSharedConvolutionalBoxPredictor/' + 'PredictionTower/conv2d_0/BatchNorm/feature_1/moving_variance'), ('WeightSharedConvolutionalBoxPredictor/' 'PredictionTower/conv2d_1/kernel'), ('WeightSharedConvolutionalBoxPredictor/' 'PredictionTower/conv2d_1/BatchNorm/feature_0/beta'), ('WeightSharedConvolutionalBoxPredictor/' 'PredictionTower/conv2d_1/BatchNorm/feature_1/beta'), + ('WeightSharedConvolutionalBoxPredictor/' + 'PredictionTower/conv2d_1/BatchNorm/feature_0/moving_mean'), + ('WeightSharedConvolutionalBoxPredictor/' + 'PredictionTower/conv2d_1/BatchNorm/feature_1/moving_mean'), + ('WeightSharedConvolutionalBoxPredictor/' + 'PredictionTower/conv2d_1/BatchNorm/feature_0/moving_variance'), + ('WeightSharedConvolutionalBoxPredictor/' + 'PredictionTower/conv2d_1/BatchNorm/feature_1/moving_variance'), # Box prediction head ('WeightSharedConvolutionalBoxPredictor/' 'WeightSharedConvolutionalBoxHead/BoxPredictor/kernel'), @@ -753,24 +828,28 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): def test_predictions_share_weights_share_tower_without_batchnorm( self): + tf.keras.backend.clear_session() num_classes_without_background = 6 + conv_box_predictor = ( + box_predictor_builder + .build_weight_shared_convolutional_keras_box_predictor( + is_training=False, + num_classes=num_classes_without_background, + conv_hyperparams=self._build_conv_hyperparams(add_batch_norm=False), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + num_predictions_per_location_list=[5, 5], + depth=32, + num_layers_before_predictor=2, + box_code_size=4, + share_prediction_tower=True, + apply_batch_norm=False)) + variables = [] + def graph_fn(image_features1, image_features2): - conv_box_predictor = ( - box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor( - is_training=False, - num_classes=num_classes_without_background, - conv_hyperparams=self._build_conv_hyperparams( - add_batch_norm=False), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - num_predictions_per_location_list=[5, 5], - depth=32, - num_layers_before_predictor=2, - box_code_size=4, - share_prediction_tower=True, - apply_batch_norm=False)) box_predictions = conv_box_predictor( [image_features1, image_features2]) + variables.extend(list(conv_box_predictor.variables)) box_encodings = tf.concat( box_predictions[box_predictor.BOX_ENCODINGS], axis=1) class_predictions_with_background = tf.concat( @@ -778,11 +857,11 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): 
axis=1) return (box_encodings, class_predictions_with_background) - with self.test_session(graph=tf.Graph()): - graph_fn(tf.random_uniform([4, 32, 32, 3], dtype=tf.float32), - tf.random_uniform([4, 16, 16, 3], dtype=tf.float32)) - actual_variable_set = set( - [var.op.name for var in tf.trainable_variables()]) + self.execute(graph_fn, [ + np.random.rand(4, 32, 32, 3).astype(np.float32), + np.random.rand(4, 16, 16, 3).astype(np.float32) + ]) + actual_variable_set = set([var.name.split(':')[0] for var in variables]) expected_variable_set = set([ # Shared prediction tower ('WeightSharedConvolutionalBoxPredictor/' @@ -806,40 +885,6 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): self.assertEqual(expected_variable_set, actual_variable_set) - def test_get_predictions_with_feature_maps_of_dynamic_shape( - self): - image_features = tf.placeholder(dtype=tf.float32, shape=[4, None, None, 64]) - conv_box_predictor = ( - box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor( - is_training=False, - num_classes=0, - conv_hyperparams=self._build_conv_hyperparams(), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - num_predictions_per_location_list=[5], - depth=32, - num_layers_before_predictor=1, - box_code_size=4)) - box_predictions = conv_box_predictor([image_features]) - box_encodings = tf.concat(box_predictions[box_predictor.BOX_ENCODINGS], - axis=1) - objectness_predictions = tf.concat(box_predictions[ - box_predictor.CLASS_PREDICTIONS_WITH_BACKGROUND], axis=1) - init_op = tf.global_variables_initializer() - - resolution = 32 - expected_num_anchors = resolution*resolution*5 - with self.test_session() as sess: - sess.run(init_op) - (box_encodings_shape, - objectness_predictions_shape) = sess.run( - [tf.shape(box_encodings), tf.shape(objectness_predictions)], - feed_dict={image_features: - np.random.rand(4, resolution, resolution, 64)}) - self.assertAllEqual(box_encodings_shape, [4, expected_num_anchors, 4]) - self.assertAllEqual(objectness_predictions_shape, - [4, expected_num_anchors, 1]) - def test_other_heads_predictions(self): box_code_size = 4 num_classes_without_background = 3 @@ -847,37 +892,36 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase): mask_height = 5 mask_width = 5 num_predictions_per_location = 5 - + box_prediction_head = keras_box_head.WeightSharedConvolutionalBoxHead( + box_code_size=box_code_size, + conv_hyperparams=self._build_conv_hyperparams(), + num_predictions_per_location=num_predictions_per_location) + class_prediction_head = keras_class_head.WeightSharedConvolutionalClassHead( + num_class_slots=num_classes_without_background + 1, + conv_hyperparams=self._build_conv_hyperparams(), + num_predictions_per_location=num_predictions_per_location) + other_heads = { + other_head_name: + keras_mask_head.WeightSharedConvolutionalMaskHead( + num_classes=num_classes_without_background, + conv_hyperparams=self._build_conv_hyperparams(), + num_predictions_per_location=num_predictions_per_location, + mask_height=mask_height, + mask_width=mask_width) + } + + conv_box_predictor = box_predictor.WeightSharedConvolutionalBoxPredictor( + is_training=False, + num_classes=num_classes_without_background, + box_prediction_head=box_prediction_head, + class_prediction_head=class_prediction_head, + other_heads=other_heads, + conv_hyperparams=self._build_conv_hyperparams(), + freeze_batchnorm=False, + inplace_batchnorm_update=False, + depth=32, + num_layers_before_predictor=2) def graph_fn(image_features): - 
box_prediction_head = keras_box_head.WeightSharedConvolutionalBoxHead( - box_code_size=box_code_size, - conv_hyperparams=self._build_conv_hyperparams(), - num_predictions_per_location=num_predictions_per_location) - class_prediction_head = keras_class_head.WeightSharedConvolutionalClassHead( - num_class_slots=num_classes_without_background + 1, - conv_hyperparams=self._build_conv_hyperparams(), - num_predictions_per_location=num_predictions_per_location) - other_heads = { - other_head_name: - keras_mask_head.WeightSharedConvolutionalMaskHead( - num_classes=num_classes_without_background, - conv_hyperparams=self._build_conv_hyperparams(), - num_predictions_per_location=num_predictions_per_location, - mask_height=mask_height, - mask_width=mask_width) - } - - conv_box_predictor = box_predictor.WeightSharedConvolutionalBoxPredictor( - is_training=False, - num_classes=num_classes_without_background, - box_prediction_head=box_prediction_head, - class_prediction_head=class_prediction_head, - other_heads=other_heads, - conv_hyperparams=self._build_conv_hyperparams(), - freeze_batchnorm=False, - inplace_batchnorm_update=False, - depth=32, - num_layers_before_predictor=2) box_predictions = conv_box_predictor([image_features]) for key, value in box_predictions.items(): box_predictions[key] = tf.concat(value, axis=1) diff --git a/research/object_detection/predictors/heads/box_head_test.py b/research/object_detection/predictors/heads/box_head_tf1_test.py similarity index 94% rename from research/object_detection/predictors/heads/box_head_test.py rename to research/object_detection/predictors/heads/box_head_tf1_test.py index dd69115e8..ab534a2bd 100644 --- a/research/object_detection/predictors/heads/box_head_test.py +++ b/research/object_detection/predictors/heads/box_head_tf1_test.py @@ -14,6 +14,7 @@ # ============================================================================== """Tests for object_detection.predictors.heads.box_head.""" +import unittest import tensorflow.compat.v1 as tf from google.protobuf import text_format @@ -21,8 +22,10 @@ from object_detection.builders import hyperparams_builder from object_detection.predictors.heads import box_head from object_detection.protos import hyperparams_pb2 from object_detection.utils import test_case +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class MaskRCNNBoxHeadTest(test_case.TestCase): def _build_arg_scope_with_hyperparams(self, @@ -59,6 +62,7 @@ class MaskRCNNBoxHeadTest(test_case.TestCase): self.assertAllEqual([64, 1, 20, 4], prediction.get_shape().as_list()) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class ConvolutionalBoxPredictorTest(test_case.TestCase): def _build_arg_scope_with_hyperparams( @@ -92,6 +96,7 @@ class ConvolutionalBoxPredictorTest(test_case.TestCase): self.assertAllEqual([64, 323, 1, 4], box_encodings.get_shape().as_list()) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class WeightSharedConvolutionalBoxPredictorTest(test_case.TestCase): def _build_arg_scope_with_hyperparams( diff --git a/research/object_detection/predictors/heads/class_head_test.py b/research/object_detection/predictors/heads/class_head_tf1_test.py similarity index 96% rename from research/object_detection/predictors/heads/class_head_test.py rename to research/object_detection/predictors/heads/class_head_tf1_test.py index eaadcdc39..3dc8fb120 100644 --- a/research/object_detection/predictors/heads/class_head_test.py +++ 
b/research/object_detection/predictors/heads/class_head_tf1_test.py @@ -14,6 +14,7 @@ # ============================================================================== """Tests for object_detection.predictors.heads.class_head.""" +import unittest import tensorflow.compat.v1 as tf from google.protobuf import text_format @@ -21,8 +22,10 @@ from object_detection.builders import hyperparams_builder from object_detection.predictors.heads import class_head from object_detection.protos import hyperparams_pb2 from object_detection.utils import test_case +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class MaskRCNNClassHeadTest(test_case.TestCase): def _build_arg_scope_with_hyperparams(self, @@ -81,6 +84,7 @@ class MaskRCNNClassHeadTest(test_case.TestCase): self.assertSetEqual(expected_var_names, actual_variable_set) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class ConvolutionalClassPredictorTest(test_case.TestCase): def _build_arg_scope_with_hyperparams( @@ -140,6 +144,7 @@ class ConvolutionalClassPredictorTest(test_case.TestCase): self.assertSetEqual(expected_var_names, actual_variable_set) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class WeightSharedConvolutionalClassPredictorTest(test_case.TestCase): def _build_arg_scope_with_hyperparams( diff --git a/research/object_detection/predictors/heads/keras_box_head_test.py b/research/object_detection/predictors/heads/keras_box_head_tf2_test.py similarity index 67% rename from research/object_detection/predictors/heads/keras_box_head_test.py rename to research/object_detection/predictors/heads/keras_box_head_tf2_test.py index 1dcf7ce36..e9e8b8dcc 100644 --- a/research/object_detection/predictors/heads/keras_box_head_test.py +++ b/research/object_detection/predictors/heads/keras_box_head_tf2_test.py @@ -14,6 +14,7 @@ # ============================================================================== """Tests for object_detection.predictors.heads.box_head.""" +import unittest import tensorflow.compat.v1 as tf from google.protobuf import text_format @@ -21,8 +22,10 @@ from object_detection.builders import hyperparams_builder from object_detection.predictors.heads import keras_box_head from object_detection.protos import hyperparams_pb2 from object_detection.utils import test_case +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class ConvolutionalKerasBoxHeadTest(test_case.TestCase): def _build_conv_hyperparams(self): @@ -51,10 +54,13 @@ class ConvolutionalKerasBoxHeadTest(test_case.TestCase): freeze_batchnorm=False, num_predictions_per_location=1, use_depthwise=False) - image_feature = tf.random_uniform( - [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - box_encodings = box_prediction_head(image_feature) - self.assertAllEqual([64, 323, 1, 4], box_encodings.get_shape().as_list()) + def graph_fn(): + image_feature = tf.random_uniform( + [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + box_encodings = box_prediction_head(image_feature) + return box_encodings + box_encodings = self.execute(graph_fn, []) + self.assertAllEqual([64, 323, 1, 4], box_encodings.shape) def test_prediction_size_depthwise_true(self): conv_hyperparams = self._build_conv_hyperparams() @@ -66,12 +72,16 @@ class ConvolutionalKerasBoxHeadTest(test_case.TestCase): freeze_batchnorm=False, num_predictions_per_location=1, use_depthwise=True) - image_feature = tf.random_uniform( - 
[64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - box_encodings = box_prediction_head(image_feature) - self.assertAllEqual([64, 323, 1, 4], box_encodings.get_shape().as_list()) + def graph_fn(): + image_feature = tf.random_uniform( + [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + box_encodings = box_prediction_head(image_feature) + return box_encodings + box_encodings = self.execute(graph_fn, []) + self.assertAllEqual([64, 323, 1, 4], box_encodings.shape) +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class MaskRCNNKerasBoxHeadTest(test_case.TestCase): def _build_fc_hyperparams( @@ -102,12 +112,16 @@ class MaskRCNNKerasBoxHeadTest(test_case.TestCase): dropout_keep_prob=0.5, box_code_size=4, share_box_across_classes=False) - roi_pooled_features = tf.random_uniform( - [64, 7, 7, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - prediction = box_prediction_head(roi_pooled_features) - self.assertAllEqual([64, 1, 20, 4], prediction.get_shape().as_list()) + def graph_fn(): + roi_pooled_features = tf.random_uniform( + [64, 7, 7, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + prediction = box_prediction_head(roi_pooled_features) + return prediction + prediction = self.execute(graph_fn, []) + self.assertAllEqual([64, 1, 20, 4], prediction.shape) +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class WeightSharedConvolutionalKerasBoxHead(test_case.TestCase): def _build_conv_hyperparams(self): @@ -133,10 +147,13 @@ class WeightSharedConvolutionalKerasBoxHead(test_case.TestCase): conv_hyperparams=conv_hyperparams, num_predictions_per_location=1, use_depthwise=False) - image_feature = tf.random_uniform( - [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - box_encodings = box_prediction_head(image_feature) - self.assertAllEqual([64, 323, 4], box_encodings.get_shape().as_list()) + def graph_fn(): + image_feature = tf.random_uniform( + [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + box_encodings = box_prediction_head(image_feature) + return box_encodings + box_encodings = self.execute(graph_fn, []) + self.assertAllEqual([64, 323, 4], box_encodings.shape) def test_prediction_size_depthwise_true(self): conv_hyperparams = self._build_conv_hyperparams() @@ -145,40 +162,38 @@ class WeightSharedConvolutionalKerasBoxHead(test_case.TestCase): conv_hyperparams=conv_hyperparams, num_predictions_per_location=1, use_depthwise=True) - image_feature = tf.random_uniform( - [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - box_encodings = box_prediction_head(image_feature) - self.assertAllEqual([64, 323, 4], box_encodings.get_shape().as_list()) - - def test_variable_count_depth_wise_true(self): - g = tf.Graph() - with g.as_default(): - conv_hyperparams = self._build_conv_hyperparams() - box_prediction_head = keras_box_head.WeightSharedConvolutionalBoxHead( - box_code_size=4, - conv_hyperparams=conv_hyperparams, - num_predictions_per_location=1, - use_depthwise=True) + def graph_fn(): image_feature = tf.random_uniform( [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - _ = box_prediction_head(image_feature) - variables = g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES) - self.assertEqual(len(variables), 3) + box_encodings = box_prediction_head(image_feature) + return box_encodings + box_encodings = self.execute(graph_fn, []) + self.assertAllEqual([64, 323, 4], box_encodings.shape) + + def test_variable_count_depth_wise_true(self): + conv_hyperparams = 
self._build_conv_hyperparams() + box_prediction_head = keras_box_head.WeightSharedConvolutionalBoxHead( + box_code_size=4, + conv_hyperparams=conv_hyperparams, + num_predictions_per_location=1, + use_depthwise=True) + image_feature = tf.random_uniform( + [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + box_prediction_head(image_feature) + self.assertEqual(len(box_prediction_head.variables), 3) def test_variable_count_depth_wise_False(self): - g = tf.Graph() - with g.as_default(): - conv_hyperparams = self._build_conv_hyperparams() - box_prediction_head = keras_box_head.WeightSharedConvolutionalBoxHead( - box_code_size=4, - conv_hyperparams=conv_hyperparams, - num_predictions_per_location=1, - use_depthwise=False) - image_feature = tf.random_uniform( - [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - _ = box_prediction_head(image_feature) - variables = g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES) - self.assertEqual(len(variables), 2) + conv_hyperparams = self._build_conv_hyperparams() + box_prediction_head = keras_box_head.WeightSharedConvolutionalBoxHead( + box_code_size=4, + conv_hyperparams=conv_hyperparams, + num_predictions_per_location=1, + use_depthwise=False) + image_feature = tf.random_uniform( + [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + box_prediction_head(image_feature) + self.assertEqual(len(box_prediction_head.variables), 2) + if __name__ == '__main__': tf.test.main() diff --git a/research/object_detection/predictors/heads/keras_class_head_test.py b/research/object_detection/predictors/heads/keras_class_head_tf2_test.py similarity index 66% rename from research/object_detection/predictors/heads/keras_class_head_test.py rename to research/object_detection/predictors/heads/keras_class_head_tf2_test.py index 4a25efc3e..aa890ce52 100644 --- a/research/object_detection/predictors/heads/keras_class_head_test.py +++ b/research/object_detection/predictors/heads/keras_class_head_tf2_test.py @@ -14,6 +14,7 @@ # ============================================================================== """Tests for object_detection.predictors.heads.class_head.""" +import unittest import tensorflow.compat.v1 as tf from google.protobuf import text_format @@ -21,8 +22,10 @@ from object_detection.builders import hyperparams_builder from object_detection.predictors.heads import keras_class_head from object_detection.protos import hyperparams_pb2 from object_detection.utils import test_case +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class ConvolutionalKerasClassPredictorTest(test_case.TestCase): def _build_conv_hyperparams(self): @@ -53,11 +56,13 @@ class ConvolutionalKerasClassPredictorTest(test_case.TestCase): freeze_batchnorm=False, num_predictions_per_location=1, use_depthwise=False) - image_feature = tf.random_uniform( - [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - class_predictions = class_prediction_head(image_feature,) - self.assertAllEqual([64, 323, 20], - class_predictions.get_shape().as_list()) + def graph_fn(): + image_feature = tf.random_uniform( + [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + class_predictions = class_prediction_head(image_feature,) + return class_predictions + class_predictions = self.execute(graph_fn, []) + self.assertAllEqual([64, 323, 20], class_predictions.shape) def test_prediction_size_depthwise_true(self): conv_hyperparams = self._build_conv_hyperparams() @@ -71,13 +76,16 @@ class 
ConvolutionalKerasClassPredictorTest(test_case.TestCase): freeze_batchnorm=False, num_predictions_per_location=1, use_depthwise=True) - image_feature = tf.random_uniform( - [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - class_predictions = class_prediction_head(image_feature,) - self.assertAllEqual([64, 323, 20], - class_predictions.get_shape().as_list()) + def graph_fn(): + image_feature = tf.random_uniform( + [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + class_predictions = class_prediction_head(image_feature,) + return class_predictions + class_predictions = self.execute(graph_fn, []) + self.assertAllEqual([64, 323, 20], class_predictions.shape) +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class MaskRCNNClassHeadTest(test_case.TestCase): def _build_fc_hyperparams(self, @@ -106,12 +114,16 @@ class MaskRCNNClassHeadTest(test_case.TestCase): freeze_batchnorm=False, use_dropout=True, dropout_keep_prob=0.5) - roi_pooled_features = tf.random_uniform( - [64, 7, 7, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - prediction = class_prediction_head(roi_pooled_features) - self.assertAllEqual([64, 1, 20], prediction.get_shape().as_list()) + def graph_fn(): + roi_pooled_features = tf.random_uniform( + [64, 7, 7, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + prediction = class_prediction_head(roi_pooled_features) + return prediction + prediction = self.execute(graph_fn, []) + self.assertAllEqual([64, 1, 20], prediction.shape) +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class WeightSharedConvolutionalKerasClassPredictorTest(test_case.TestCase): def _build_conv_hyperparams(self): @@ -137,10 +149,13 @@ class WeightSharedConvolutionalKerasClassPredictorTest(test_case.TestCase): conv_hyperparams=conv_hyperparams, num_predictions_per_location=1, use_depthwise=False) - image_feature = tf.random_uniform( - [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - class_predictions = class_prediction_head(image_feature) - self.assertAllEqual([64, 323, 20], class_predictions.get_shape().as_list()) + def graph_fn(): + image_feature = tf.random_uniform( + [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + class_predictions = class_prediction_head(image_feature) + return class_predictions + class_predictions = self.execute(graph_fn, []) + self.assertAllEqual([64, 323, 20], class_predictions.shape) def test_prediction_size_depthwise_true(self): conv_hyperparams = self._build_conv_hyperparams() @@ -149,42 +164,39 @@ class WeightSharedConvolutionalKerasClassPredictorTest(test_case.TestCase): conv_hyperparams=conv_hyperparams, num_predictions_per_location=1, use_depthwise=True) - image_feature = tf.random_uniform( - [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - class_predictions = class_prediction_head(image_feature) - self.assertAllEqual([64, 323, 20], class_predictions.get_shape().as_list()) - - def test_variable_count_depth_wise_true(self): - g = tf.Graph() - with g.as_default(): - conv_hyperparams = self._build_conv_hyperparams() - class_prediction_head = ( - keras_class_head.WeightSharedConvolutionalClassHead( - num_class_slots=20, - conv_hyperparams=conv_hyperparams, - num_predictions_per_location=1, - use_depthwise=True)) + def graph_fn(): image_feature = tf.random_uniform( [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - _ = class_prediction_head(image_feature) - variables = g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES) - 
self.assertEqual(len(variables), 3) + class_predictions = class_prediction_head(image_feature) + return class_predictions + class_predictions = self.execute(graph_fn, []) + self.assertAllEqual([64, 323, 20], class_predictions.shape) + + def test_variable_count_depth_wise_true(self): + conv_hyperparams = self._build_conv_hyperparams() + class_prediction_head = ( + keras_class_head.WeightSharedConvolutionalClassHead( + num_class_slots=20, + conv_hyperparams=conv_hyperparams, + num_predictions_per_location=1, + use_depthwise=True)) + image_feature = tf.random_uniform( + [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + class_prediction_head(image_feature) + self.assertEqual(len(class_prediction_head.variables), 3) def test_variable_count_depth_wise_False(self): - g = tf.Graph() - with g.as_default(): - conv_hyperparams = self._build_conv_hyperparams() - class_prediction_head = ( - keras_class_head.WeightSharedConvolutionalClassHead( - num_class_slots=20, - conv_hyperparams=conv_hyperparams, - num_predictions_per_location=1, - use_depthwise=False)) - image_feature = tf.random_uniform( - [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - _ = class_prediction_head(image_feature) - variables = g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES) - self.assertEqual(len(variables), 2) + conv_hyperparams = self._build_conv_hyperparams() + class_prediction_head = ( + keras_class_head.WeightSharedConvolutionalClassHead( + num_class_slots=20, + conv_hyperparams=conv_hyperparams, + num_predictions_per_location=1, + use_depthwise=False)) + image_feature = tf.random_uniform( + [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + class_prediction_head(image_feature) + self.assertEqual(len(class_prediction_head.variables), 2) if __name__ == '__main__': diff --git a/research/object_detection/predictors/heads/keras_mask_head_test.py b/research/object_detection/predictors/heads/keras_mask_head_tf2_test.py similarity index 67% rename from research/object_detection/predictors/heads/keras_mask_head_test.py rename to research/object_detection/predictors/heads/keras_mask_head_tf2_test.py index 4cdce7a1c..5465be06f 100644 --- a/research/object_detection/predictors/heads/keras_mask_head_test.py +++ b/research/object_detection/predictors/heads/keras_mask_head_tf2_test.py @@ -14,6 +14,7 @@ # ============================================================================== """Tests for object_detection.predictors.heads.mask_head.""" +import unittest import tensorflow.compat.v1 as tf from google.protobuf import text_format @@ -21,8 +22,10 @@ from object_detection.builders import hyperparams_builder from object_detection.predictors.heads import keras_mask_head from object_detection.protos import hyperparams_pb2 from object_detection.utils import test_case +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class ConvolutionalMaskPredictorTest(test_case.TestCase): def _build_conv_hyperparams(self): @@ -55,11 +58,13 @@ class ConvolutionalMaskPredictorTest(test_case.TestCase): use_depthwise=False, mask_height=7, mask_width=7) - image_feature = tf.random_uniform( - [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - mask_predictions = mask_prediction_head(image_feature) - self.assertAllEqual([64, 323, 20, 7, 7], - mask_predictions.get_shape().as_list()) + def graph_fn(): + image_feature = tf.random_uniform( + [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + mask_predictions = 
mask_prediction_head(image_feature) + return mask_predictions + mask_predictions = self.execute(graph_fn, []) + self.assertAllEqual([64, 323, 20, 7, 7], mask_predictions.shape) def test_prediction_size_use_depthwise_true(self): conv_hyperparams = self._build_conv_hyperparams() @@ -75,11 +80,13 @@ class ConvolutionalMaskPredictorTest(test_case.TestCase): use_depthwise=True, mask_height=7, mask_width=7) - image_feature = tf.random_uniform( - [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - mask_predictions = mask_prediction_head(image_feature) - self.assertAllEqual([64, 323, 20, 7, 7], - mask_predictions.get_shape().as_list()) + def graph_fn(): + image_feature = tf.random_uniform( + [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + mask_predictions = mask_prediction_head(image_feature) + return mask_predictions + mask_predictions = self.execute(graph_fn, []) + self.assertAllEqual([64, 323, 20, 7, 7], mask_predictions.shape) def test_class_agnostic_prediction_size_use_depthwise_false(self): conv_hyperparams = self._build_conv_hyperparams() @@ -96,11 +103,13 @@ class ConvolutionalMaskPredictorTest(test_case.TestCase): mask_height=7, mask_width=7, masks_are_class_agnostic=True) - image_feature = tf.random_uniform( - [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - mask_predictions = mask_prediction_head(image_feature) - self.assertAllEqual([64, 323, 1, 7, 7], - mask_predictions.get_shape().as_list()) + def graph_fn(): + image_feature = tf.random_uniform( + [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + mask_predictions = mask_prediction_head(image_feature) + return mask_predictions + mask_predictions = self.execute(graph_fn, []) + self.assertAllEqual([64, 323, 1, 7, 7], mask_predictions.shape) def test_class_agnostic_prediction_size_use_depthwise_true(self): conv_hyperparams = self._build_conv_hyperparams() @@ -117,13 +126,16 @@ class ConvolutionalMaskPredictorTest(test_case.TestCase): mask_height=7, mask_width=7, masks_are_class_agnostic=True) - image_feature = tf.random_uniform( - [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - mask_predictions = mask_prediction_head(image_feature) - self.assertAllEqual([64, 323, 1, 7, 7], - mask_predictions.get_shape().as_list()) + def graph_fn(): + image_feature = tf.random_uniform( + [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + mask_predictions = mask_prediction_head(image_feature) + return mask_predictions + mask_predictions = self.execute(graph_fn, []) + self.assertAllEqual([64, 323, 1, 7, 7], mask_predictions.shape) +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class MaskRCNNMaskHeadTest(test_case.TestCase): def _build_conv_hyperparams(self, @@ -155,10 +167,13 @@ class MaskRCNNMaskHeadTest(test_case.TestCase): mask_prediction_num_conv_layers=2, mask_prediction_conv_depth=256, masks_are_class_agnostic=False) - roi_pooled_features = tf.random_uniform( - [64, 7, 7, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - prediction = mask_prediction_head(roi_pooled_features) - self.assertAllEqual([64, 1, 20, 14, 14], prediction.get_shape().as_list()) + def graph_fn(): + roi_pooled_features = tf.random_uniform( + [64, 7, 7, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + prediction = mask_prediction_head(roi_pooled_features) + return prediction + prediction = self.execute(graph_fn, []) + self.assertAllEqual([64, 1, 20, 14, 14], prediction.shape) def test_prediction_size_with_convolve_then_upsample(self): 
mask_prediction_head = keras_mask_head.MaskRCNNMaskHead( @@ -172,12 +187,16 @@ class MaskRCNNMaskHeadTest(test_case.TestCase): mask_prediction_conv_depth=256, masks_are_class_agnostic=True, convolve_then_upsample=True) - roi_pooled_features = tf.random_uniform( - [64, 14, 14, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - prediction = mask_prediction_head(roi_pooled_features) - self.assertAllEqual([64, 1, 1, 28, 28], prediction.get_shape().as_list()) + def graph_fn(): + roi_pooled_features = tf.random_uniform( + [64, 14, 14, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + prediction = mask_prediction_head(roi_pooled_features) + return prediction + prediction = self.execute(graph_fn, []) + self.assertAllEqual([64, 1, 1, 28, 28], prediction.shape) +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class WeightSharedConvolutionalMaskPredictorTest(test_case.TestCase): def _build_conv_hyperparams(self): @@ -204,11 +223,13 @@ class WeightSharedConvolutionalMaskPredictorTest(test_case.TestCase): conv_hyperparams=self._build_conv_hyperparams(), mask_height=7, mask_width=7)) - image_feature = tf.random_uniform( - [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - mask_predictions = mask_prediction_head(image_feature) - self.assertAllEqual([64, 323, 20, 7, 7], - mask_predictions.get_shape().as_list()) + def graph_fn(): + image_feature = tf.random_uniform( + [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + mask_predictions = mask_prediction_head(image_feature) + return mask_predictions + mask_predictions = self.execute(graph_fn, []) + self.assertAllEqual([64, 323, 20, 7, 7], mask_predictions.shape) def test_class_agnostic_prediction_size(self): mask_prediction_head = ( @@ -219,11 +240,13 @@ class WeightSharedConvolutionalMaskPredictorTest(test_case.TestCase): mask_height=7, mask_width=7, masks_are_class_agnostic=True)) - image_feature = tf.random_uniform( - [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) - mask_predictions = mask_prediction_head(image_feature) - self.assertAllEqual([64, 323, 1, 7, 7], - mask_predictions.get_shape().as_list()) + def graph_fn(): + image_feature = tf.random_uniform( + [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32) + mask_predictions = mask_prediction_head(image_feature) + return mask_predictions + mask_predictions = self.execute(graph_fn, []) + self.assertAllEqual([64, 323, 1, 7, 7], mask_predictions.shape) if __name__ == '__main__': tf.test.main() diff --git a/research/object_detection/predictors/heads/keypoint_head_test.py b/research/object_detection/predictors/heads/keypoint_head_tf1_test.py similarity index 94% rename from research/object_detection/predictors/heads/keypoint_head_test.py rename to research/object_detection/predictors/heads/keypoint_head_tf1_test.py index 0dc4c6f73..828174989 100644 --- a/research/object_detection/predictors/heads/keypoint_head_test.py +++ b/research/object_detection/predictors/heads/keypoint_head_tf1_test.py @@ -14,6 +14,7 @@ # ============================================================================== """Tests for object_detection.predictors.heads.keypoint_head.""" +import unittest import tensorflow.compat.v1 as tf from google.protobuf import text_format @@ -21,8 +22,10 @@ from object_detection.builders import hyperparams_builder from object_detection.predictors.heads import keypoint_head from object_detection.protos import hyperparams_pb2 from object_detection.utils import test_case +from object_detection.utils import tf_version 
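# The hunks above and below apply two recurring changes across the head and
# predictor tests: each test file is renamed to *_tf1_test.py or *_tf2_test.py
# and gated at the class level with unittest.skipIf on
# object_detection.utils.tf_version, and graph construction is wrapped in a
# local graph_fn that is run through test_case.TestCase.execute instead of an
# explicit test_session/tf.Session. A minimal, self-contained sketch of the
# combined pattern follows; the class name and the constant it checks are
# illustrative only and are not part of the patch.
import unittest

import tensorflow.compat.v1 as tf

from object_detection.utils import test_case
from object_detection.utils import tf_version


@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class ExampleTf2OnlyTest(test_case.TestCase):
  """Illustrates the skipIf + graph_fn/execute idiom used in this patch."""

  def test_constant_shape(self):
    def graph_fn():
      # Build ops inside graph_fn; execute() runs it under the test harness
      # appropriate for the active TF version.
      return tf.constant([[1.0, 2.0], [3.0, 4.0]])
    output = self.execute(graph_fn, [])
    self.assertAllEqual(output.shape, (2, 2))


if __name__ == '__main__':
  tf.test.main()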
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class MaskRCNNKeypointHeadTest(test_case.TestCase): def _build_arg_scope_with_hyperparams(self, diff --git a/research/object_detection/predictors/heads/mask_head_test.py b/research/object_detection/predictors/heads/mask_head_tf1_test.py similarity index 96% rename from research/object_detection/predictors/heads/mask_head_test.py rename to research/object_detection/predictors/heads/mask_head_tf1_test.py index d3bd6819d..152394836 100644 --- a/research/object_detection/predictors/heads/mask_head_test.py +++ b/research/object_detection/predictors/heads/mask_head_tf1_test.py @@ -14,6 +14,7 @@ # ============================================================================== """Tests for object_detection.predictors.heads.mask_head.""" +import unittest import tensorflow.compat.v1 as tf from google.protobuf import text_format @@ -21,8 +22,10 @@ from object_detection.builders import hyperparams_builder from object_detection.predictors.heads import mask_head from object_detection.protos import hyperparams_pb2 from object_detection.utils import test_case +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class MaskRCNNMaskHeadTest(test_case.TestCase): def _build_arg_scope_with_hyperparams(self, @@ -75,6 +78,7 @@ class MaskRCNNMaskHeadTest(test_case.TestCase): self.assertAllEqual([64, 1, 1, 28, 28], prediction.get_shape().as_list()) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class ConvolutionalMaskPredictorTest(test_case.TestCase): def _build_arg_scope_with_hyperparams( @@ -131,6 +135,7 @@ class ConvolutionalMaskPredictorTest(test_case.TestCase): mask_predictions.get_shape().as_list()) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class WeightSharedConvolutionalMaskPredictorTest(test_case.TestCase): def _build_arg_scope_with_hyperparams( diff --git a/research/object_detection/predictors/mask_rcnn_box_predictor_test.py b/research/object_detection/predictors/mask_rcnn_box_predictor_tf1_test.py similarity index 97% rename from research/object_detection/predictors/mask_rcnn_box_predictor_test.py rename to research/object_detection/predictors/mask_rcnn_box_predictor_tf1_test.py index 4733e7a5f..d9a4bcbbf 100644 --- a/research/object_detection/predictors/mask_rcnn_box_predictor_test.py +++ b/research/object_detection/predictors/mask_rcnn_box_predictor_tf1_test.py @@ -14,6 +14,7 @@ # ============================================================================== """Tests for object_detection.predictors.mask_rcnn_box_predictor.""" +import unittest import numpy as np import tensorflow.compat.v1 as tf @@ -23,8 +24,10 @@ from object_detection.builders import hyperparams_builder from object_detection.predictors import mask_rcnn_box_predictor as box_predictor from object_detection.protos import hyperparams_pb2 from object_detection.utils import test_case +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class MaskRCNNBoxPredictorTest(test_case.TestCase): def _build_arg_scope_with_hyperparams(self, diff --git a/research/object_detection/predictors/mask_rcnn_keras_box_predictor_test.py b/research/object_detection/predictors/mask_rcnn_keras_box_predictor_tf2_test.py similarity index 76% rename from research/object_detection/predictors/mask_rcnn_keras_box_predictor_test.py rename to research/object_detection/predictors/mask_rcnn_keras_box_predictor_tf2_test.py index 
fbffe44e2..a92db9e90 100644 --- a/research/object_detection/predictors/mask_rcnn_keras_box_predictor_test.py +++ b/research/object_detection/predictors/mask_rcnn_keras_box_predictor_tf2_test.py @@ -14,6 +14,7 @@ # ============================================================================== """Tests for object_detection.predictors.mask_rcnn_box_predictor.""" +import unittest import numpy as np import tensorflow.compat.v1 as tf @@ -23,8 +24,10 @@ from object_detection.builders import hyperparams_builder from object_detection.predictors import mask_rcnn_keras_box_predictor as box_predictor from object_detection.protos import hyperparams_pb2 from object_detection.utils import test_case +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class MaskRCNNKerasBoxPredictorTest(test_case.TestCase): def _build_hyperparams(self, @@ -46,17 +49,17 @@ class MaskRCNNKerasBoxPredictorTest(test_case.TestCase): return hyperparams_builder.KerasLayerHyperparams(hyperparams) def test_get_boxes_with_five_classes(self): + mask_box_predictor = ( + box_predictor_builder.build_mask_rcnn_keras_box_predictor( + is_training=False, + num_classes=5, + fc_hyperparams=self._build_hyperparams(), + freeze_batchnorm=False, + use_dropout=False, + dropout_keep_prob=0.5, + box_code_size=4, + )) def graph_fn(image_features): - mask_box_predictor = ( - box_predictor_builder.build_mask_rcnn_keras_box_predictor( - is_training=False, - num_classes=5, - fc_hyperparams=self._build_hyperparams(), - freeze_batchnorm=False, - use_dropout=False, - dropout_keep_prob=0.5, - box_code_size=4, - )) box_predictions = mask_box_predictor( [image_features], prediction_stage=2) @@ -70,18 +73,19 @@ class MaskRCNNKerasBoxPredictorTest(test_case.TestCase): self.assertAllEqual(class_predictions_with_background.shape, [2, 1, 6]) def test_get_boxes_with_five_classes_share_box_across_classes(self): + mask_box_predictor = ( + box_predictor_builder.build_mask_rcnn_keras_box_predictor( + is_training=False, + num_classes=5, + fc_hyperparams=self._build_hyperparams(), + freeze_batchnorm=False, + use_dropout=False, + dropout_keep_prob=0.5, + box_code_size=4, + share_box_across_classes=True + )) def graph_fn(image_features): - mask_box_predictor = ( - box_predictor_builder.build_mask_rcnn_keras_box_predictor( - is_training=False, - num_classes=5, - fc_hyperparams=self._build_hyperparams(), - freeze_batchnorm=False, - use_dropout=False, - dropout_keep_prob=0.5, - box_code_size=4, - share_box_across_classes=True - )) + box_predictions = mask_box_predictor( [image_features], prediction_stage=2) @@ -95,19 +99,19 @@ class MaskRCNNKerasBoxPredictorTest(test_case.TestCase): self.assertAllEqual(class_predictions_with_background.shape, [2, 1, 6]) def test_get_instance_masks(self): + mask_box_predictor = ( + box_predictor_builder.build_mask_rcnn_keras_box_predictor( + is_training=False, + num_classes=5, + fc_hyperparams=self._build_hyperparams(), + freeze_batchnorm=False, + use_dropout=False, + dropout_keep_prob=0.5, + box_code_size=4, + conv_hyperparams=self._build_hyperparams( + op_type=hyperparams_pb2.Hyperparams.CONV), + predict_instance_masks=True)) def graph_fn(image_features): - mask_box_predictor = ( - box_predictor_builder.build_mask_rcnn_keras_box_predictor( - is_training=False, - num_classes=5, - fc_hyperparams=self._build_hyperparams(), - freeze_batchnorm=False, - use_dropout=False, - dropout_keep_prob=0.5, - box_code_size=4, - conv_hyperparams=self._build_hyperparams( - 
op_type=hyperparams_pb2.Hyperparams.CONV), - predict_instance_masks=True)) box_predictions = mask_box_predictor( [image_features], prediction_stage=3) diff --git a/research/object_detection/predictors/rfcn_box_predictor_test.py b/research/object_detection/predictors/rfcn_box_predictor_tf1_test.py similarity index 95% rename from research/object_detection/predictors/rfcn_box_predictor_test.py rename to research/object_detection/predictors/rfcn_box_predictor_tf1_test.py index 7a484c085..555c4b2ad 100644 --- a/research/object_detection/predictors/rfcn_box_predictor_test.py +++ b/research/object_detection/predictors/rfcn_box_predictor_tf1_test.py @@ -14,6 +14,7 @@ # ============================================================================== """Tests for object_detection.predictors.rfcn_box_predictor.""" +import unittest import numpy as np import tensorflow.compat.v1 as tf @@ -22,8 +23,10 @@ from object_detection.builders import hyperparams_builder from object_detection.predictors import rfcn_box_predictor as box_predictor from object_detection.protos import hyperparams_pb2 from object_detection.utils import test_case +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class RfcnBoxPredictorTest(test_case.TestCase): def _build_arg_scope_with_conv_hyperparams(self): diff --git a/research/object_detection/predictors/rfcn_keras_box_predictor_test.py b/research/object_detection/predictors/rfcn_keras_box_predictor_tf2_test.py similarity index 85% rename from research/object_detection/predictors/rfcn_keras_box_predictor_test.py rename to research/object_detection/predictors/rfcn_keras_box_predictor_tf2_test.py index d8cc01e4b..f845068e3 100644 --- a/research/object_detection/predictors/rfcn_keras_box_predictor_test.py +++ b/research/object_detection/predictors/rfcn_keras_box_predictor_tf2_test.py @@ -14,6 +14,7 @@ # ============================================================================== """Tests for object_detection.predictors.rfcn_box_predictor.""" +import unittest import numpy as np import tensorflow.compat.v1 as tf @@ -22,8 +23,10 @@ from object_detection.builders import hyperparams_builder from object_detection.predictors import rfcn_keras_box_predictor as box_predictor from object_detection.protos import hyperparams_pb2 from object_detection.utils import test_case +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class RfcnKerasBoxPredictorTest(test_case.TestCase): def _build_conv_hyperparams(self): @@ -42,18 +45,17 @@ class RfcnKerasBoxPredictorTest(test_case.TestCase): return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams) def test_get_correct_box_encoding_and_class_prediction_shapes(self): - + rfcn_box_predictor = box_predictor.RfcnKerasBoxPredictor( + is_training=False, + num_classes=2, + conv_hyperparams=self._build_conv_hyperparams(), + freeze_batchnorm=False, + num_spatial_bins=[3, 3], + depth=4, + crop_size=[12, 12], + box_code_size=4) def graph_fn(image_features, proposal_boxes): - rfcn_box_predictor = box_predictor.RfcnKerasBoxPredictor( - is_training=False, - num_classes=2, - conv_hyperparams=self._build_conv_hyperparams(), - freeze_batchnorm=False, - num_spatial_bins=[3, 3], - depth=4, - crop_size=[12, 12], - box_code_size=4 - ) + box_predictions = rfcn_box_predictor( [image_features], proposal_boxes=proposal_boxes) diff --git a/research/object_detection/protos/center_net.proto b/research/object_detection/protos/center_net.proto new 
file mode 100644 index 000000000..5047c000f --- /dev/null +++ b/research/object_detection/protos/center_net.proto @@ -0,0 +1,203 @@ +syntax = "proto2"; + +package object_detection.protos; + +import "object_detection/protos/image_resizer.proto"; +import "object_detection/protos/losses.proto"; + +// Configuration for the CenterNet meta architecture from the "Objects as +// Points" paper [1] +// [1]: https://arxiv.org/abs/1904.07850 + +message CenterNet { + // Number of classes to predict. + optional int32 num_classes = 1; + + // Feature extractor config. + optional CenterNetFeatureExtractor feature_extractor = 2; + + // Image resizer for preprocessing the input image. + optional ImageResizer image_resizer = 3; + + // Parameters which are related to object detection task. + message ObjectDetection { + // The original fields are moved to ObjectCenterParams or deleted. + reserved 2, 5, 6, 7; + + // Weight of the task loss. The total loss of the model will be the + // summation of task losses weighted by the weights. + optional float task_loss_weight = 1 [default = 1.0]; + + // Weight for the offset localization loss. + optional float offset_loss_weight = 3 [default = 1.0]; + + // Weight for the height/width localization loss. + optional float scale_loss_weight = 4 [default = 0.1]; + + // Localization loss configuration for object scale and offset losses. + optional LocalizationLoss localization_loss = 8; + } + optional ObjectDetection object_detection_task = 4; + + // Parameters related to object center prediction. This is required for both + // object detection and keypoint estimation tasks. + message ObjectCenterParams { + // Weight for the object center loss. + optional float object_center_loss_weight = 1 [default = 1.0]; + + // Classification loss configuration for object center loss. + optional ClassificationLoss classification_loss = 2; + + // The initial bias value of the convlution kernel of the class heatmap + // prediction head. -2.19 corresponds to predicting foreground with + // a probability of 0.1. See "Focal Loss for Dense Object Detection" + // at https://arxiv.org/abs/1708.02002. + optional float heatmap_bias_init = 3 [default = -2.19]; + + // The minimum IOU overlap boxes need to have to not be penalized. + optional float min_box_overlap_iou = 4 [default = 0.7]; + + // Maximum number of boxes to predict. + optional int32 max_box_predictions = 5 [default = 100]; + + // If set, loss is only computed for the labeled classes. + optional bool use_labeled_classes = 6 [default = false]; + } + optional ObjectCenterParams object_center_params = 5; + + // Path of the file that conatins the label map along with the keypoint + // information, including the keypoint indices, corresponding labels, and the + // corresponding class. The file should be the same one as used in the input + // pipeline. Note that a plain text of StringIntLabelMap proto is expected in + // this file. + // It is required only if the keypoint estimation task is specified. + optional string keypoint_label_map_path = 6; + + // Parameters which are related to keypoint estimation task. + message KeypointEstimation { + // Name of the task, e.g. "human pose". Note that the task name should be + // unique to each keypoint task. + optional string task_name = 1; + + // Weight of the task loss. The total loss of the model will be their + // summation of task losses weighted by the weights. + optional float task_loss_weight = 2 [default = 1.0]; + + // Loss configuration for keypoint heatmap, offset, regression losses. 
Note + // that the localization loss is used for offset/regression losses and + // classification loss is used for heatmap loss. + optional Loss loss = 3; + + // The name of the class that contains the keypoints for this task. This is + // used to retrieve the corresponding keypoint indices from the label map. + // Note that this corresponds to the "name" field, not "display_name". + optional string keypoint_class_name = 4; + + // The standard deviation of the Gaussian kernel used to generate the + // keypoint heatmap. The unit is the pixel in the output image. It is to + // provide the flexibility of using different sizes of Gaussian kernel for + // each keypoint class. Note that if provided, the keypoint standard + // deviations will be overridden by the specified values here, otherwise, + // the default value 5.0 will be used. + // TODO(yuhuic): Update the default value once we found the best value. + map keypoint_label_to_std = 5; + + // Loss weights corresponding to different heads. + optional float keypoint_regression_loss_weight = 6 [default = 1.0]; + optional float keypoint_heatmap_loss_weight = 7 [default = 1.0]; + optional float keypoint_offset_loss_weight = 8 [default = 1.0]; + + // The initial bias value of the convolution kernel of the keypoint heatmap + // prediction head. -2.19 corresponds to predicting foreground with + // a probability of 0.1. See "Focal Loss for Dense Object Detection" + // at https://arxiv.org/abs/1708.02002. + optional float heatmap_bias_init = 9 [default = -2.19]; + + // The heatmap score threshold for a keypoint to become a valid candidate. + optional float keypoint_candidate_score_threshold = 10 [default = 0.1]; + + // The maximum number of candidates to retrieve for each keypoint. + optional int32 num_candidates_per_keypoint = 11 [default = 100]; + + // Max pool kernel size to use to pull off peak score locations in a + // neighborhood (independently for each keypoint types). + optional int32 peak_max_pool_kernel_size = 12 [default = 3]; + + // The default score to use for regressed keypoints that are not + // successfully snapped to a nearby candidate. + optional float unmatched_keypoint_score = 13 [default = 0.1]; + + // The multiplier to expand the bounding boxes (either the provided boxes or + // those which tightly cover the regressed keypoints). Note that new + // expanded box for an instance becomes the feasible search window for all + // associated keypoints. + optional float box_scale = 14 [default = 1.2]; + + // The scale parameter that multiplies the largest dimension of a bounding + // box. The resulting distance becomes a search radius for candidates in the + // vicinity of each regressed keypoint. + optional float candidate_search_scale = 15 [default = 0.3]; + + // One of ['min_distance', 'score_distance_ratio'] indicating how to select + // the keypoint candidate. + optional string candidate_ranking_mode = 16 [default = "min_distance"]; + + // The radius (in the unit of output pixel) around heatmap peak to assign + // the offset targets. If set 0, then the offset target will only be + // assigned to the heatmap peak (same behavior as the original paper). + optional int32 offset_peak_radius = 17 [default = 0]; + + // Indicates whether to assign offsets for each keypoint channel + // separately. If set False, the output offset target has the shape + // [batch_size, out_height, out_width, 2] (same behavior as the original + // paper). 
If set True, the output offset target has the shape [batch_size, + // out_height, out_width, 2 * num_keypoints] (recommended when the + // offset_peak_radius is not zero). + optional bool per_keypoint_offset = 18 [default = false]; + } + repeated KeypointEstimation keypoint_estimation_task = 7; + + // Parameters which are related to mask estimation task. + // Note: Currently, CenterNet supports a weak instance segmentation, where + // semantic segmentation masks are estimated, and then cropped based on + // bounding box detections. Therefore, it is possible for the same image + // pixel to be assigned to multiple instances. + message MaskEstimation { + // Weight of the task loss. The total loss of the model will be their + // summation of task losses weighted by the weights. + optional float task_loss_weight = 1 [default = 1.0]; + + // Classification loss configuration for segmentation loss. + optional ClassificationLoss classification_loss = 2; + + // Each instance mask (one per detection) is cropped and resized (bilinear + // resampling) from the predicted segmentation feature map. After + // resampling, the masks are binarized with the provided score threshold. + optional int32 mask_height = 4 [default = 256]; + optional int32 mask_width = 5 [default = 256]; + optional float score_threshold = 6 [default = 0.5]; + + // The initial bias value of the convlution kernel of the class heatmap + // prediction head. -2.19 corresponds to predicting foreground with + // a probability of 0.1. + optional float heatmap_bias_init = 3 [default = -2.19]; + } + optional MaskEstimation mask_estimation_task = 8; +} + +message CenterNetFeatureExtractor { + optional string type = 1; + + // Channel means to be subtracted from each image channel. If not specified, + // we use a default value of 0. + repeated float channel_means = 2; + + // Channel standard deviations. Each channel will be normalized by dividing + // it by its standard deviation. If not specified, we use a default value + // of 1. + repeated float channel_stds = 3; + + // If set, will change channel order to be [blue, green, red]. This can be + // useful to be compatible with some pre-trained feature extractors. + optional bool bgr_ordering = 4 [default = false]; +} diff --git a/research/object_detection/protos/faster_rcnn.proto b/research/object_detection/protos/faster_rcnn.proto index 7e06fbcf4..486cc77ea 100644 --- a/research/object_detection/protos/faster_rcnn.proto +++ b/research/object_detection/protos/faster_rcnn.proto @@ -188,7 +188,7 @@ message Context { // Next id: 4 // The maximum number of contextual features per-image, used for padding - optional int32 max_num_context_features = 1 [default = 8500]; + optional int32 max_num_context_features = 1 [default = 2000]; // The bottleneck feature dimension of the attention block. optional int32 attention_bottleneck_dimension = 2 [default = 2048]; diff --git a/research/object_detection/protos/model.proto b/research/object_detection/protos/model.proto index 9333f2df1..4fb6aed0b 100644 --- a/research/object_detection/protos/model.proto +++ b/research/object_detection/protos/model.proto @@ -2,6 +2,7 @@ syntax = "proto2"; package object_detection.protos; +import "object_detection/protos/center_net.proto"; import "object_detection/protos/faster_rcnn.proto"; import "object_detection/protos/ssd.proto"; @@ -17,6 +18,7 @@ message DetectionModel { // value to a function that builds your model. 
ExperimentalModel experimental_model = 3; + CenterNet center_net = 4; } } diff --git a/research/object_detection/samples/configs/ssdlite_mobiledet_gpu_320x320_coco_sync_4x4.config b/research/object_detection/samples/configs/ssdlite_mobiledet_gpu_320x320_coco_sync_4x4.config new file mode 100644 index 000000000..634eb9e49 --- /dev/null +++ b/research/object_detection/samples/configs/ssdlite_mobiledet_gpu_320x320_coco_sync_4x4.config @@ -0,0 +1,204 @@ +# SSDLite with MobileDet-GPU feature extractor. +# Reference: Xiong & Liu et al., https://arxiv.org/abs/2004.14525 +# Trained on COCO, initialized from scratch. +# +# 5.07B MulAdds, 13.11M Parameters. +# Latencies are 11.0ms (fp32), 3.2ms (fp16) and 2.3ms (int8) on Jetson Xavier, +# optimized using TensorRT 7.1. +# Achieves 28.7 mAP on COCO14 minival dataset. +# Achieves 27.5 mAP on COCO17 val dataset. +# +# This config is TPU compatible. + +model { + ssd { + inplace_batchnorm_update: true + freeze_batchnorm: false + num_classes: 90 + box_coder { + faster_rcnn_box_coder { + y_scale: 10.0 + x_scale: 10.0 + height_scale: 5.0 + width_scale: 5.0 + } + } + matcher { + argmax_matcher { + matched_threshold: 0.5 + unmatched_threshold: 0.5 + ignore_thresholds: false + negatives_lower_than_unmatched: true + force_match_for_each_row: true + use_matmul_gather: true + } + } + similarity_calculator { + iou_similarity { + } + } + encode_background_as_zeros: true + anchor_generator { + ssd_anchor_generator { + num_layers: 6 + min_scale: 0.2 + max_scale: 0.95 + aspect_ratios: 1.0 + aspect_ratios: 2.0 + aspect_ratios: 0.5 + aspect_ratios: 3.0 + aspect_ratios: 0.3333 + } + } + image_resizer { + fixed_shape_resizer { + height: 320 + width: 320 + } + } + box_predictor { + convolutional_box_predictor { + min_depth: 0 + max_depth: 0 + num_layers_before_predictor: 0 + use_dropout: false + dropout_keep_probability: 0.8 + kernel_size: 3 + use_depthwise: true + box_code_size: 4 + apply_sigmoid_to_scores: false + class_prediction_bias_init: -4.6 + conv_hyperparams { + activation: RELU_6, + regularizer { + l2_regularizer { + weight: 0.00004 + } + } + initializer { + random_normal_initializer { + stddev: 0.03 + mean: 0.0 + } + } + batch_norm { + train: true, + scale: true, + center: true, + decay: 0.97, + epsilon: 0.001, + } + } + } + } + feature_extractor { + type: 'ssd_mobiledet_gpu' + min_depth: 16 + depth_multiplier: 1.0 + use_depthwise: true + conv_hyperparams { + activation: RELU_6, + regularizer { + l2_regularizer { + weight: 0.00004 + } + } + initializer { + truncated_normal_initializer { + stddev: 0.03 + mean: 0.0 + } + } + batch_norm { + train: true, + scale: true, + center: true, + decay: 0.97, + epsilon: 0.001, + } + } + override_base_feature_extractor_hyperparams: false + } + loss { + classification_loss { + weighted_sigmoid_focal { + alpha: 0.75, + gamma: 2.0 + } + } + localization_loss { + weighted_smooth_l1 { + delta: 1.0 + } + } + classification_weight: 1.0 + localization_weight: 1.0 + } + normalize_loss_by_num_matches: true + normalize_loc_loss_by_codesize: true + post_processing { + batch_non_max_suppression { + score_threshold: 1e-8 + iou_threshold: 0.6 + max_detections_per_class: 100 + max_total_detections: 100 + use_static_shapes: true + } + score_converter: SIGMOID + } + } +} + +train_config: { + batch_size: 512 + sync_replicas: true + startup_delay_steps: 0 + replicas_to_aggregate: 32 + num_steps: 400000 + data_augmentation_options { + random_horizontal_flip { + } + } + data_augmentation_options { + ssd_random_crop { + } + } + optimizer { + 
momentum_optimizer: { + learning_rate: { + cosine_decay_learning_rate { + learning_rate_base: 0.8 + total_steps: 400000 + warmup_learning_rate: 0.13333 + warmup_steps: 2000 + } + } + momentum_optimizer_value: 0.9 + } + use_moving_average: false + } + max_number_of_boxes: 100 + unpad_groundtruth_tensors: false +} + +train_input_reader: { + label_map_path: "PATH_TO_BE_CONFIGURED/mscoco_label_map.pbtxt" + tf_record_input_reader { + input_path: "PATH_TO_BE_CONFIGURED/mscoco_train.record-?????-of-00100" + } +} + +eval_config: { + metrics_set: "coco_detection_metrics" + use_moving_averages: false + num_examples: 8000 +} + +eval_input_reader: { + label_map_path: "PATH_TO_BE_CONFIGURED/mscoco_label_map.pbtxt" + shuffle: false + num_epochs: 1 + tf_record_input_reader { + input_path: "PATH_TO_BE_CONFIGURED/mscoco_val.record-?????-of-00010" + } +} diff --git a/research/object_detection/test_images/snapshot_serengeti/README.md b/research/object_detection/test_images/snapshot_serengeti/README.md new file mode 100644 index 000000000..bec44871e --- /dev/null +++ b/research/object_detection/test_images/snapshot_serengeti/README.md @@ -0,0 +1,17 @@ +# Citation and license + +The images and metadata in this folder come from the Snapshot Serengeti dataset, +and were accessed via [LILA.science](http://lila.science/datasets/snapshot-serengeti). +The images and species-level labels are described in more detail in the +associated manuscript: + +``` +Swanson AB, Kosmala M, Lintott CJ, Simpson RJ, Smith A, Packer C (2015) +Snapshot Serengeti, high-frequency annotated camera trap images of 40 mammalian +species in an African savanna. Scientific Data 2: 150026. (DOI) (bibtex) +``` + +Please cite this manuscript if you use this dataset. + +This data set is released under the +[Community Data License Agreement (permissive variant)](https://cdla.io/permissive-1-0/). 
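The sample pipeline file added above is a text-format protocol buffer, so it can be parsed into the object detection pipeline proto for inspection or programmatic edits. The sketch below is illustrative rather than part of the patch: it assumes the object_detection package is importable, that pipeline_pb2.TrainEvalPipelineConfig is the top-level message for such configs, and that the file sits at a hypothetical local path.

from google.protobuf import text_format
from object_detection.protos import pipeline_pb2

# Hypothetical location of the new sample config added in this patch.
config_path = '/tmp/ssdlite_mobiledet_gpu_320x320_coco_sync_4x4.config'

pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
with open(config_path, 'r') as f:
  # The .config samples are text-format protos, so Merge() can parse them.
  text_format.Merge(f.read(), pipeline_config)

# Values below correspond to the MobileDet-GPU sample defined above.
print(pipeline_config.model.ssd.num_classes)        # 90
print(pipeline_config.model.ssd.image_resizer.fixed_shape_resizer.height)  # 320
print(pipeline_config.train_config.batch_size)      # 512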
diff --git a/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0038.jpeg b/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0038.jpeg new file mode 100644 index 0000000000000000000000000000000000000000..a843d7618c5968b367babc3f6778b2a1efbecd1c GIT binary patch literal 630625 zcmeF&dsLIxwlMsa3j`1oFR>;z30SBJtyOMq6Ce*kDBw4H^PP1cC@6ev`KQob!$G{_(!=f8Tdz z#y}n+gq7d3E_2Sccy|5Sokh#nr6znpNLbiQ#DNgvOqdiZSqN8p_+Y{H+*iMXj|G2R zUxe#L6dIwx=edt%6uaL(*TMDke_S`gwdjxMP~qAOewGQJ+u*|+uG#Rp7d{qKe}A31 z=X{n!B`o+n1|JCr>A$@u6@J0r>4a?ZUgx#Z^A&H;XrEU-*Z6pP!gK!L4_A054ZifR z&%UpUJ=eVTr;Vwe-kzRoygj`<{XE5fp0B%ldiiS-fd*)0ie_YSC?_ZAvLP8!o5i$Tz_+RP&ufFE+WF`Fg@Ar1@ z=XvlycnRVTAB*7QU;l?oeEk1JAo~5??aLPXV;(g zkdFz2Zf9>tXV}}>J1`gyP7CKbIXXHmVJ&c8$bIhlW!&dD9GAvEn zmtR@+%JVOHzv{K}RgYI*SxI3yI5;^vvFFWWujF(1EB}ul&wiwwfY#YjetZ1hR`@@a z1`T64I6BRv0Li}|zkmC`eRBb!QfM?Pon~i8hha>~n465~3+)yy_X@UO9Q7XK1qItX zyWqHk+nTfWOQNqmbobejl;i01+)@sA*^4WLB9E7RUw!R$vBYofpVozht`7^}yycBI z-+KGcTVr>=|3O@Q!Y*ZU%D(-nX{y|h4}5a)-~XCdSX6xE)1#k#UUH(Wyy9eK)u}JP z{-);K`3n~>ef#|nKQ=TPo0@-W@96wzSNHW_Zrr(R?!R~6GGHAX9UHezJeqtwHN6m; zOr_IlbjDnhDb)RQZ+0QwZn>BJqTndTdy2&`cxOAX*AyH-Tkq)R6a8?>j-+c&&$;{F zd2w{E)xS0Rf3M9s|5uIv&o=*Olb_X*g$^W@7-*E`gu0MITS$3!gE-TmCoiNeBtc}V zsi;U_Bd4q*G~XJ+thqcbr;e{99N&Y!BueHGFYzVCa%yxNHA*&b9bunPKbI|=w;_*s zHQ1}44>r)nC)A7bh=@%-$s=1uq=HR$6g!c99x?-sy-+sacy9S`--_szQ=@)+#tAh$ zS}!8(8ZwyFMmtq5r>%dXB%U(=MZzv7a=$jBx+b$@lM}@cMU?dDs#X#8$Jdnq$Wku$ zoS;^($p7g2k1k)ZMh%QB|DnoVvAWLl%guj!c*T89xhjvm7qd$^B%AMBOp14?o$QEx z9)VY?xzk~lvGRhv)Z4=b?2`Rn%f0-!n>nGA@wC5p)YOodPTD1emgS_fDN9<9UXZ-o z{`G2GQ{17#^*_iN%l~d&^W%fHV=BQv3p>e;aN;2%C)7(5Dz3+q>* zMX_@W@5}Y3%udR`MYIin|LOZqv=6!FEC04PqWyT|bBlf%lvzh)ON-V!XZ@2wH+Lm3c@(ka{IqDeCl@d=iFdESz` z)}=I;j_#?hV-=qUrBcRni=8FhAXBz%>9B2#>r;RDebx@2>1Sl(%b))Iw>@t6-9P%S zirPMq6uN3_^wP)Shl9oZm-qDEe7kDf-)MDojjn_gw%-s}yt|s!aZ=nWscLr)sCa#m z*kE7#jprM0zTuP^J5=)VJN}+_>K%?l-MO+QtN#8UqSd*k`4@81EK^(F`t-o-&K|q_ zV>{eq+Uyu#l_VE?JBK(*05ZsEa3@5F%H5^l~#I!B$cHoh} zrZ4_*m0R83F16RH<4vu7#3DF%G~MacyI-7-w8)l4y>z~fA-0r$f39Xmy@bPCTxY2z zd#yjcbuWg+pUxkD`tpfuQCpXOvS#gS_t;bYDZfgd#?vBRe&fLR4`iO}-;_%0;Ylrj z$&XmyoRacE&jB}aUmiKPmg6aBpUC-rwaPyTmfS*CDu=#|6aUw>5RqZ&)I*eYH-w4&wgm9@GBK?k7= z{Jh)n;WjV#((Sp=$UooMvqSQJjOUl$4*gF>)UW2TM&7D8@2wluAAH1p`c=7FK zXXdrGG(b4!RLJ%k%|SJzkYjhO%KTmGjWeDKm>W^i_ngnx*{Z2w+0b5(Rt zVVbhOeIfQf@5r_o%YeN?Ui3Njyb-yC--!yrDfa4l8bNkx+H!-OsE-I$H=?q z#OdmR%`V2=?ADC*Ed9yT#&ZRK)~`G;xSku-&E>VxPss0F^L~e0Kae(@_l`qRx@g;i z%bw<-chA@i4wdH@Zl9lNunb5i>BqZ1C#oBVZO9{AzlK#2d7tmy%&f9WcRo>=?0 zUTJpYngTL*m3{Emt(7sJ+?Aoelh7p=IrhQ3TdR6%zASX)XHU3% z;w>Gu$~dZDNB=S8`r+p>!KU}>jQ6@O{p}oG-SOTh$_pIR)TDvAH+tjQ_^Dfk>59i) zzW@HGTrzWezb_@}*7@`NV6)-X3zg-XEt-!EA3n0)P`|#L+kd!mNm%d&*S`EL)i2*% zi7#03cHL+Eru9P_RU2c&uJ*{pf7IQf6s^p;9oQaHD4_$Bk=Dil_SCaC__alct^r3oqL4{=LC- z(Ko|yxJ1>_rYydcW9i8kD4nuM+xh^Xnd$#r^`P1*X!=6PRV zaWamb8TIQ|y}9aCqWi|!eLu85a2y`i=BT~jugzQeQu8O<_(pw}%Pz~rJH@NLBz9jj zcrm5n#BzFPV^qX`#i!!w9p5~N{je@ekZM#Bm~=Nk9QlukOLt#B=KqGKXIDYmYYDIX zdg{@F_x?1#gnHyA|1WAz>w^=Y&R=Z&w0ZX{Gr_yw?7hDH6UF_N?SFTo#CeTe-|76$ z{guJDE@u2hic_;1H&yhm4a&Q9dfoNH*0=8-czum~h~x5wi4AWYbzL}KxHi$_#-~Tk zU;J&V;+yUdSH5!Z`kVAaJ3TW;m`h56zOXdYCti%CY)m4_H+zj&(G1-48 zy`5JU+CBf7vokeG#j4n9phDm^_s`2muC2TryEO0g$$zH!CnWDP73?{C`kwWs$d#Gs zxZ6_b^$Gdr&UcZE*6i>bo{qfNV|Unf;|8zB`*)U-5B<{yW;B|vjjFx*n<|Vm5j*~T z>)7!{EB_SY?7Spwq4IpRdmz4r5E33oT&N&h1K<;S0#9&PTwwcvPE^ORT=a_xa*-GhjD>gi)FO?lhm zCu`Ezr~IjteyIQ3nxuE0mVX)%+n2^6Pk+tU@hq`FEZo&LaM*a4d*{;CHxtL#e6gfE zefNv$ueDsFx$^o7T1eKjU!ILTo7uAN?Ja~n!v_kW01BW03ZMWApa2S>01BW03ZMWA zpa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S> 
z01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW0 z3ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWA zpa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S> z01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW0 z3ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWA zpa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BYM|6GBQXER&Y zy}gBy|G5PC&QSmbPyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu` z00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG0 z1yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5 zPyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu` z00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG0 z1yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5 zPyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu` z00mG01yBG5Pyhu`00mG01yBG5Pyhv01BW03ZMWApa2S> z01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW0 z3ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWA zpa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S> z01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW0 z3ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWA zpa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S> z01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>!2e)@ zk)h%(>)zf%$p2sieCsHH0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch z0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2 zD1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVe zfC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch z0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2 zD1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVe zfC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVe@c(^* z9`Z3+FwdFk?6iP6pSft^g2f!sGR||)alG7Kf#Zh~r z-;RlmPlykUNKz@@Py5RU@jEFC7cJr};jCV^Z1v7H;x#+}j~~x|q?{mxVn_MyLH>A9 zs5H8rJ;TA#X&!~pe*Yf)`}=qQ+cy^wDuqU)(rI>fbUM6D27Hgu7uqda?iFmmIO;vd z3ktS(cENE6w>4+$mqcHC=x5n;#|AV;rgk8$ylzsbC(^R=1ANb_pzyCF_u&DURr$;~gyyQe#dBw@f zs#9Nn{Y}le^A|2&`u6)Der#woHZ}j$-qHEbuI}r<+_-bs+<))BWxzT(IyP>bcr^KV zYI-3wnM$YA=#050Q>gplgSL=vx7^EqQE(LFJ;mY|yt5tHYYL8^t#@?uiGH|bN76N? 
z=iGhoyf`}7>fajuzt`rR|Eos-XPf`C$mHJ5OxNKf4gD2( z%SHDCGY1IWqOUo2R>rtD6>S@qIfh4ahGq6iN%4W78|co#bl$@D+}TZJRZwJmzEx(Q zA(Cc@Nb^JsU)H$ZKnhj;j|`)|YP)_dr7Xa5%0Lscb@$bjj*8VmhX~get?@+TR`z_WGWu5V!%ps`Nj!_v-7!h8^YQg1E8Dn>B z1=q~~rd>_T-zp+Q-Id`!1zMjWnf-c>>D?}^gE1(aY%e_||HIniUK;$Ioed7H`5}XL55n zCgiUptNHd>jfazmW%If@zWN+Bd!p~jGCxI8?r@iy)-fG*ePMxKL}A=|G#xz=v?`wP zin=Q;p?f1A8feq)8Ojo}S<@HCsUSs<<oH+f@F^AUT8$$-Bis5OFzCEIW?BPbXi9(Z{g}0tYo>!K5HQ55%NQO1Zuip~TMpbkKq_>ae?;`B%+b#!a1u{mj*v4Ti zhDr!i))>T-i(OAy7``Ny!SU6_vWXy-5p;lW%GyPu!$^verp-A#X&f^U)?SsLrIJnJ zT-r5yFBxTk5$@P3BCYoRQO3$M zFB%GaklscepK9M{uppUo&`cKnF2AsK_cj4)O0hka&!$uYZC z;s=|Ep!8_CXR3iDOD`IfvuyHltTo(E8+k`&FP)AS6qAc}vmdoD_~DF-$Q34)SF5Cj zFbwtL>1!q~=o@(!_GjX0c$?WJq`42~l%c%sq2~xse<|pYa2MHJx;acXZ!-U6e1HPF z?EYWlXW~h~?3NrgPa4nvYQ5yHnt65hTk84O$$I{%jPNqVceTe@Fqmdl?c>I6ZPesP zzLx-n<+{I&k)xcFQ!ahmKkKV2Xr7i+EjQca+SSab9_hN6ry`1HaKmZ=%z~9&#aeGa zIZ0*EORM>+2?H%RMBLw>5l=~sj8NPtD)h4rh~QqeCy&${?2LCE3_F$j&XYms1zs_N@%8As8XZjK7XvRpjUOt~ZX!H&n(=&my-=HRfGp?5om4q( zi!&I{krj?&enOadaztjI9y?Z6)TW2&yWZ8z&syeno6o3*9@=jccBy>^GmA;AG%56) z&}Oh3nPn$Gh$oV|=D1gxrjzpGuJ#>_QvyH+k5rAl5dgC z|254T9^C5KC#Ur0R}-F@o;)!eyxmK~n{FfGL1hp7^S*IomzquKaW`k@1zU|ZyI1Xe><9X6a;_TynV*sF4LRuwnS(dWv%19XY;ZaU3f_2LOO1v+1@-aCG{vyIXr z9Su+IQw#+iFEM#orr;O#@irPMbTe(M5By{VQF0B zi#j{hZgizFKu$|*GVR^!1&gxF+E5ko^vp;|HAxKg$p(RqTz*tONgFrV4T`wG2E9Zg ze`a5Dt02oKn);)zKWU@D>QhbFmnJx3czy+MCrk#6T@H@`!XaRpKRm&>HhUz5v8T6& zVw#K;#M%>PJIW@aL7u7TZ(ZI~*D6I`%G+?D=?TN$Eh=euf_!E%+I44p4>qbe`5QO)|nJ&zH&i?BKbohdNek!?9XR%>`- zNaSXjqb4!bcOOh*fu#ptUa~SgM>^N;V_BxHB+BUPJUYdUr=*Q^CVCK42ShJkoy&Z3 zo9xw2WaMc)k@_jxwK}DY3=%^uggREW*kFD^y_6>Aa=C_BF->^p-CZO&Kb&!jmPhu> zLidL6&CF3Ry~#{}8eh+u9upDCaBoi7UWg|G9y-j3coWc@Ios{~a z;O52aN3TEyW6+Dfnq_zmp2i6c~ufCI6b|qsz5t$3chPa`}ZPcA{CDBZ8=(iC{>rjM| zI{?w?+JdnOy^muSmJrALhhgpe z@x8P4hSDz*&F$)CStfS*hwjVB#+(ojh+ERlF5w%siHvaKFco?-Bf&q}Dx3E>8lsZv zcJugbGPzl@{T(fgHhw-o-eAYzJn)DNh=JAeqK%jL?namqa_f1va>0)us3{v@tbE=V zJ#$4jeP8Xo&o}D;@sLQw=C8{8_Ca^!1lE)uAmo}}DPvqTpYwQRAh8`2Weh9aVz168 z(eMfyjm?k&^M;LcS9`@qKIaOvH*`p0ZfFfNSh#o9&X8w(pk^Mc8p$4NAoiO52HNpo zvzAG{8i=UXFU62DH`lL8n$M5q4^Se4`^RrHd{YA8_tE<&%cqwSzqWK6n^d+f&R4O{ zizq&gJ`JlGRv1%0VZ$XPuz3H7jIlXX+(K4)-h5O(sBBj+l?&R1h448kJ$iy6F_8YKx<-hM95T|HE8O^g5FIV<4%*79C5-&->s$km zK~nNC-c&&tJ>oJ6LkO{KZuZc637cGMHib){%+10d9M#4XVL*xvVl_RiFkR|+GCV9Z zRnEPv2qsoNlfn=bXC{-lQ||X+{iR4I$_L{q!2vPXj2j^$rSwloFP1}(x0|i0^)`%l zK)%4t$of8ytldz0QbcAFLi}O*`Ng!Nh}Q?%H_LaO*RRY5H=Q4L{Hx97!IL5mT_PD7dbb1?^oh znydAQ47@lAx#(Rr1C~h>tw^_{xIn*@A*dmr_nWJi4=|Ooc{yRtL$}2SO-?X>ZXJ=c z?nG>E-9%n@A201HWo0#!86vO>)sZ5u7wm>Jil zURp_q?4^ney?`(_mqyrPb+b8Y=h2Rva?enjuvg8>JgL12N!I)CKkyo4Cismh=d#87 zM>)4T@;A#ILL8gR#L`eR1QF)IU&>ChEWK)GnovAb)tYXzj>w$K-1B!O80<(PaVvuS z%|N$RjC7_LNS`3hRKfRxg?2`oCafSkseVtKh>!m2F;0`w^d>T_kuL{f>4-XMB6MK;~)(tWOdS+@FZyc72%4*1iX;sZbA>Ps{11;#OD#+z~=&=1U|JsNa`tVnwrZ-u0D+fMVz46Bb8?z;cP zQxBB@)bAd>u$4p<@0)~$T&pMv)VXHR!XlPgJAY} z^YD6mm7Hom&9wKQEI0EYy4H+0i`k@H+gnpib}Bd24T>nz!1qEkb&!Qj^f40{YhkV! zO!CPSQ6j_b2;)YsS|VLXJbJQg;Lu?u5VAzqFarW@eL|?Cf04G>~P8%v=M}S!m=DJ7!uj2(hZ7zI-l=HESj81!71H zu1Xq&JUO*c>SeTSXbfEo6X{x0I74tk?c{H+tR?)}Vp{!#>7I;m0&LUrpsmEwrJA2v zL-tRWLFIx2QTa@&ftI6M^y^JR3CxV6MY2YJIVJzc{f33Dq0%Zc=%xQe)1M)R>FybMJ#rU= zCM@bGA>KZn=H6`06Ih`4RSafCK*FieWWXb2@k5KrG%vp$N2 z3X^RZ@&rx#^!@CnJ}43_E->B+kBsnwh=Wtc7i0?hQeW((8kIn7U5}_iMK8Dfm#N~;&Mc|J;T3^+P{A&Pi!

(n{e)?3#rButFYmbAxD!vR+V=9yey7i32h@Y-%DCa$jM@%(b3VrgOxlpzOkS zh=R83_Dz)cF+Ue{lG?M=?K>46d4zR3-n22xk3{K@X??;pu=MxAqIFVD(k9D-`twMX zO&au4WaM@m+4;!gTj|ENLE0jSv>C>-z6WN3L^?15;g64zaZbp0ysKVXRUY_sfTA2& z&t4D|PBvu85H zl%7m)qm<5GI49(qD+s^#l(GY6x0cl#bG5MGbIs?37plqXS|Kb`mDd-DOo6$OK31;+ zx3O}Q3p8g~Hqk(6^f8&KS^zPk>Y_+F|TZXqi|#1$iI5oxM#E;mrK zP5dfKl7urf9cARt#XpQnK@F9$I~U3Xfi;=XEuhMfzinLRINTYPA5J_}Ooobt`@_tW zc_JGcce!-4%pyDvbC0SmLqrjqtNAdxH^%a#1>Jnn3bnIyQX0%nF2|spLb3KjTxVz8$eOLC$Ml^E z{0xHE36@FnW$_a41Ky>+&G``;!bB4 z#|^=|W}Nhak#M^x8xkp~V|&$1!c+=gRnJIj3Gv^dWk5es3}*SsC01BvD>IW|o~Y0r6>^L_E?o6gCV&v{51qis&>fx;Zgv zpwIGHr|xQWFzX0BQYRY~?NCa!xIz{D5g+cXiZaMhMbwZueeuPBhH<^KH&xa9$DK6(+E?B9R4SkHt7ob!CXc*cXJ=((X@DwfDoC)i9gjPW>LQica> zst%TJ-I(a!S=$I~C@4C7biP zn@nr7We|+o%qz5BF!-5LYZ5o#ohD;!E1f+DJ1$()g*(9kW(Xc7!y$?uUy>2ur-e?l zdps24a%ok-M>RzMnTfsck%2*hymqAbFjLVC6AHI^FuwT#!~uJ1%E!V+VCpYwFM^0D z=I&90aZe3N9-fYZY8NwTl}#60332Ms?K~;0fn{z1i5aKn##j$q1#=dy{%-L;Hqn`D z`HzQW^TMFoMB{SHW^4um`Ggz94(ZWQo0t1%vXiY)?ET6BwNLs`$2oo#c^cmYBSK=l zP)+M#lPHK%A_b)fmQO2W_VMs$!&;NgwPN9LlGIS>J1*m&Kl@ln1D0VbDl~oWK)BP`zO&E*|!} zQ6dpT&EV2R13fXlji_uc2bMR(=vaM`Cr@(ZK#;EW6G#UF^q%vHN8f~<8e(U; z`J_E%U@o}k{yJYAqbq=U*VSd!zS7JDhk8h0RB^c>5Yidhiy_0zkNmm4Sf8PkIbI*F zub^uUAE+1TpANVcddV1f!j;C2lujoS@!60J5(Nfiu2QC78)1BuQuZ*=MrxmtWX zsysvYP$1`M8s5--YFbXj5;to}w3A*joD1trXOwAAj+$l+XK-d}VQu-U9AZ?}{<0i@ zIbm$?%&inaug#jahK@mLo+T9p#MVOW(HV8yV4%-vVO{)n5iN|p(mj{F&fs>tES8< z!y{t|A98Tc2n1M0DC=4Zx(>$(V9q&hM>{SVkvVSA`azU(Ocm8JS1Z11g`zBpoBMb5 z_7g=0*tT#U)pjNas8nr4p%+1YMa&k!*zH%9?ux83*a?FUtbk>OLDcK*j|wHeI<2@o#gYoc_iA=d|EmlDuY=zbbC$uPBTnrBVio{vU&NL@gu;u5K0yKKWV1hC~Xh3 zma)}I!LU#KB40UIDjOT0D7VU-7@5Vna4X%>>STZ7(Kqntf$w+#!}{mL6MmmlAD0&b757bLbf6`X!+9FpO{^=6g^VbkjpWZQzwn#Cq4MAEqZI>ANX#4MJIINmB$St_f!mNq#QJVT@+ zUZM&>Xy-$_H0n^P3c`$W)7fFD{ix=mg0}Je>KEps(@WAX1XE>!zs#k+!`Y>yhL{w~ zsDa)r-Q<6Y(-%h;L-o=1e3CS~++7KIO71D{WSKZ%5o~69D`uV!Fm#$WY6^!|TD|(X z|5ZQ2+Mz7*=M~jKEqgGHFJh1hDeNJINyBEWZx?C$$4emK=+ai|XCWFa_+)s4PMySz zoXcHi`H*cC*^zi{BrkVBE!0_`2AOjmnXGtHb|tfh=q7v@7fr+K=fB%@ z?SPmETNdXCV=!T^+?U~<@b*XjE7qI15S&-)w1EfNA5UAL<8_ysdJpf1E%_#kFJJ|Q zOPVJPgeQ>tkB1~d&s~&8%bgest#r}GlV&Lt6>p|5g2JLUZ06AFJEe^<5UhC{A!;kr zw8yp0O^yBL3c)2P1ey3&r8ao3I%5`2l)0VHfEcB#I4Kh{_14s<38^+C%-zYxtv<$T zvcl6NQ2~2X5Jx6o-F|F<;uozgeF|H@6Gryj?3S*&Rw#xT!RL1V)}SEhIkPT@cpA#> zUNx;X9ri<^mH-v`R{y!Zn#dm}Dqz-19f*HmM|eADi{qqeuvts-GF3o+bmOSOKyx1* z3gsHN--0zMUTO!Sh~`>-YZer@vNhLWX!BH~Q&HE_3`Eh)lc$;`5I{6dmT4eGT03`# zAb;3dpsl&&YOdrTBK)&OB@rLDQD>#`t&wipPV({S1}Hi(x^l0pQVezjp+Q@NTOn8U z>K-qX47EVW?whp`((dsRsp^TG_>FgTw-;0NP#_SLer6a`f}F7}Di;cWXPcUQkNwp^ zsESayrwo2-#qd(Ib8PX5wey zCgfg>-Ov8~S0+?-HP^1gc0UWw;Rs+V$SqkbV<^HQfs3{n2G$W;_6@E6bGS_z>2bM` zx%Q_(rL2WkOID;-^e3bi_VzqzL?8#W}{ z_W2%zQ1MD`Kvew@vG1XRbW=JmG(pYA7n*2JMWsmPkcJs11!&ZPUc$K3+yxFG1Z4 z&MUAj9BJ)9(0+S0^YO?~3CnWZmr&}=aC@O9!PwzbYLyM&rmBfgyWrzZazv`k*Bu(qoyg;x*p%-lX$6gPjTnNIYm z^@lOM%ByYXlc;7Z6iE%1QMOC12{s2flMLVFI~QSgvUAs$>pi@hB)riEs0D~z$5_=I z_Y|`)(Jj_z7RPpHLwubf;xb&#`a}pdJBGTB2JBqmt&$VRdTwh5EQ`J*_e{Gs6p9M| zblw7qpf-WAZLtPoDb3>23SNHXD9puhie~zhe+q1CdbB#;(mbjd){I(VK`4~iAbac| zg+Mg6n=z2!RYKmV9}bm3k%`6~DUb;{W~lWKL~e{0d`_cPkX4TE+OpFkO8_;VSUa6l zhSsMIbwhtI7)Xb0#p!~Ha$^$I;H|LzHFPbvl?B7kti7CBeC)f-WO9+lec&O5w0@{X zc#^;d?9G_bu*Y($?kQt?jmkDNK#|tQb72wM6~uxvppL10+~$*;D`UhqFnFWJU4*!2 zXTw{7lM}4QqClAY3rbbnc1}XEIrhGVEvH=9`n8+2SC0_k3;A<}#QOW$&6cYbgnm~e zCn~B!W}pv7kiO^w=)7Lpg+4=}P(V7~mzm7!h7#Iz>?o9?X&n+vEkm^dmIZdVho+i2Nwi@n-ep)ZD(QYX0sb}zz4(y;`YaSUc^ zR{K##p&fDM`)nwzbjyWPLoELMxm3k)S6i!sWn66Bo{*%-QNqNNGBVxHGegf>5Io$l z8bTqW(Bz}`6wSg37qhs0q*uMvFll(%)18{= z9Z&Jf4fh|H5%KPv(DtMBlo6ObuZI5?EFAM&wrv!)Uf0+wO14cx^@q`w4ZU`$CK|>% 
z9Lj^S`J#vmJAHZ&u^ZG-jk459!}xX9kj&0yvy2!sMLb@g)?7lo3Z8D_E$qo7-ZC*P zPv&gsAulO&8=%-FJ>U`nU!t9;>VF)?0TS2qJ8n^(d0 zD9ye>9~rE3395Z=d~b*=QU^EDRkZxm@vvZ>v4kw8b;Ic|`=D??C*ojX!$PZd`c zFlghO%CxoVup08cZscay-&eyLnrjX58Zo}0Ud-)F86Mf7PBpomBm9g547!Eq0*Bjp zf~?hZCm;fEts|~Hiv((cVmJjzxsbP3>(vW88-<=CSW4bj>MyZ#^~HqMWVvL}DzuPD zN7CC;`yfhHv`|u8WibC9&KS*GL->hspze8wb%XUgD2T1`69oA=K%b*@YCwblAA6wS#cds&p0t;!H-UkYW>=%Hh$`Gh}U%&74d3Rwim}epUAo+ zb5uL>6BQ+neLG|G2n4>BTreUM!z3*TR%7JBq?99kGlO@z>ec6~?w#{6o+_*C2%HC00FnXkiRUR*E47cIWIGNt1gobHa9g-lyUiR zF2bv1Tg=R@D$=M}&tQ{_u5w8$l+Q10_0z!o^KNCNO6mt4mSPBES~i9k|)^5V6xWxx0|`m^`LG4G$cW(PlUvK@9y>(O9c_N%MG)!0(tgI$?HBWz(`JfS5CBco+%wTK;tO zs2Yx3k+l&MmZG5wIViY*eC@NNBv zez*X~lAc_Sh7CwqF`S=~2+^eUiJVd|O>1z4M3B96f0^39e-sXOB-r3QC)el(2S74) z(@xt;Wvp=G_4wM{=}ubihU+nMDpS!w(xFJ(8aY;$3!Rqr164zn1idq-R{dN* zmBH~jN5*4WpVM4ie9n!jb7@pBy#iTK-*_SAJ}kyl2lt2qPr}q??Jb^kAD2-Y)66~9 z!ab!B;aX1J)v`DN&?le>B!<+5c#Yi9TDsvJ1WX#Ce29(ci~MG5;tUr40V3R-t;z6+ zWWXW2Hwed4xmDciCx;Rt963Fykj>LljQoK{zfmaYG!|1Mg(L75(p-1sA5C;~ojd=P z$n>yJ&zc?}Lv7I$bwo}}9geWL(dO!!&T8>+Se-9nKDHTHdm*1tj)cO|Vp^V$M$tk9 zw-h1vjExWze-7n*N!?H$q5P1T@lBre8;DvyTE>dZ>@6k>Z!`EF5y6rsm_zsV@4N;? z_L7mQD4Q6110y#lXjx<(bky7%MftnP_UjXd?75>Nv}z9I+OaMNR#&;gVUwnEGgQmB z->PEtu*u>&#@TW>%>xTk3qKU{I#1G<<+pYSqBcX^MVCV^N62zoJJhAv=`gtSBfr<| zoQ8;gzcsWEs@F8FzoNMehNe2nN1a-B=3Uq$ys}PE2j|h)m*HsG(*!A$uec}RAmrnA z0nF#OZDnu=Vjtc;DrtpnHC~J*uv47^X?K02B9y)#*7vFkD8E6yru$4#>*2X>&>)if z6^clZ%#OilL@H#CMsv8Q(gf-K%@R2ypbxeH-J>9g5Y$}GhBBE-pO6Yi^c_2DS7&V{ zjB}j+M=$~zn}8P5qm9)sptcqMC6VYAIWx$Mk#m+4%2MbR;H)K#3soB%e*>D;P|53m|?tyytqm_0QF~I61h?D55qX#7-5DzW^cG_ zS|5lI3nP@KQ-hhnodtlXma+i$R=1(x5TvsM@9a>TG`l2d55G=M>7u!*~Dz3!|?}=()t*>WR(=Q03KTcZMm*GK`3kvh5jT}1~+kh z$(3Vv%V7f=Lf%-85jGuCq0pEOn^Z}P((jMzGoF-?z2@-seX#27Ea<*n;#KhLoLt^U zMVFe`Ck9u>#LcU-z;xOBtDjsoF(V?+NOpQV1^(W_mt2%)H5r_>;t4neCrh1{N5V!& zwz0z12A$<{QI}S?7|#D~dy*9{XWfGhYvHMS>8bT~95>j5T6{CARl+9eJLQl&^Y&S~ zG|KFHh)*Q2b*j|GU4>minm!@3Rw{)etWNuMJ{PvjuB=<$4C{WFq`BFUZh-@ZWe_-? 
z$&9=g+Ie>>ryl-LtHiH8He>x zHw}L*-~yBCR*fb1IxNZIbJ-88nir$Dk<1;%eNU7!hg37KEu95_mf}@f?GX^`OO&_I zEw@0v(9}2DW3Pqt;jn0052wMM?atq|Sp&OJmA*PNoR4GP8sKk&qj0H)q=+=w8LC~8 zF+fQlQ?*k-E4Fhy1g3)RH>cavdG=yCXvHny=~_ZSAqWIAtvhTQUc!u)i6Na3 z!F`UkK4YtNM4=rBbQTg`9&yA98fpj;0SPZ6(DS+b*YvtZCEU4x-VMY24Z++gOqQH-LmHHwO=R577_3>vG35jNo21xk_URzy zS}00CtsEm+uGSB+Jgl!wXF3f9455B_5^&~^L~I=g6IWHz2^UAuL|nf9eXUL2XQa#+8eZ$1BW7h$ z0M?k6Lr#2KQ(BGT`vN!MJ8Auwp+J|q_MgGL#l9;0h=lhKJ1aoHI+WYOG2QTn3c?LI zo9`R`#f_uBfBZ^(rs+g&GX53znY%YNAy}OGI@`V@Z&Y?L4yJQka*HWBKmy}@DkurO za(UO^(sGRB%sSTDRt-t&!QM8slL$_*q9a;ylvX>Lt?uMvs+8^BB_n%Ir2;qr)(o*7wAYKLh7UX^zb`&_(3L z%NVIpzN0SFYrfEkeK~bLDN0}RFcxr?`SIQ6b2C^%7!UHUUt><*`x4F+?t;pbBu+5v za8|6rP#x!%0ZQl=4_NpY!oXF!eBEvC+v2%i^7DGGiPdaTW21Xob*T-+Rc9~ex5Xn3 zf$k+}qC$Bp`Lqu-TlDMO!P3~r4dx__{)BbO)gt>3Capy-s^Dfdel#m|(ppNlZ9mTG zpXEDOH+M!yjsk@ZP$;c0Ew5juSp1(Cm)oXTcd&H4YR!!ida%m5!PjsPBwmWFBP{7+ zs@IWg%3xlI3SRW4HYHbMY0()6BKOvxKLyeOb=gA!m27iMXS~ivyM|DY`-)?vGYpmU zrBu{L{l7(!P%eu&m^i$Pa^kof@1)t6?{T=4sms7jb2o$DGRj)9gY-K*EsSm)rhL!n zy|_-d*nvH3a@l(87;J3^3YJBn`;2L9zShZ@5Lq>13Tfic7WBz4(E^BJP);W@Jq`>kfN z{u@1Qk29_T4qB)f?o)0ff-5`ssmGi|mMzJ040qHX#T8Tz?7o}NsBBn>f8fH~ky z9CWc-g7ls9hHUfKm%oPGD(yJjXYlt;E?6)HMO7bCWE1w?g0OzjM(zLfZItoVunrAC zxw&~&Zih8uV6G({eB)+e(zF_0s$xZ2?gIewrgV90n~z4N{EhZIYvzfEfv3bTnexBfSM*o&7HU>oc+cwMX3B!|k1=kmo18}BqyNFq{SjRrf&BME`3;!f6 zl-D&w`drXJ=1+j?Nk3#5Ls6MGrka|{FtDsIMEig2uY$rL%>VVvg1Mn>Bs?><9(dOS zfQM?D+y$v;$-Az^vbQ0UvI+(qm;+NI*FK7v!T3VN{wmDB=etj!6@`XkhsKLGU22!z z|KlSdkJbcNhjnxu!k0eQ{C(qiy&aJCP-?%=a>@@>u&_aqll3F%F2eMo>|$7;f|WB~ z9IW1*ge8boQMx|Tdf4$an!%VdY^oL4xx2v*y*{Qk=!diP7btavhP5d>vuJbU)&($Sb@n!MXdZQfz zo)~AuEV4|8{}HU+^{#!1A1*>e70F@K9qJl?jHu^ZvQqo7Xw{fqv^x~Du{TNEnc>ImwUi2 zD+IiXjDsLYppbs6Sp(@!nD>$9DLP%)Tw@B%GJdTZ63W4jgPXF)NOlC`000I9hsPNAoetynp8st z40gg+$=%Izk33L%I0hIPxSeIoG^5px-!DkC;h~_BkgAO-$@q`65P`R56OmQVK-*0# z4u~BE5&`w;oW81=1lQY`9dvUNw8Vp&4-2*Ei@*eVYf}60k4mM5748-y_85doLlTTn zd69Jz=uKVbY6&E=vfneWqy9_sm~p$};Rho5FPE)Yp8BNG*w}VEj&5=Iol`*dSyk5i zhDT@dOP!5l4{4c6^L?>+*Y!p5PaE9YgUNu<93Vp0Dm$WXL_s%ki*sq9n?vqe@5InZs%mEQPG!bmX{HtmfTD)lLOiK5V@ZCP?CJKgkjD6H8|M>+t3jIl5d)#f0 z1D4GR7$@w|bCcs$kDnRtuyzurwoI^S<>Hqhx-qsU&|6ED$z!Ih<7o~{M z6DAht@9SyDl0Wy~<8mm9oY)=j8Iay~t z4fuo7@~66?VjS1-3-=9@l63&wuPcUit(lNc4f6gx{!qJ(_q&O;OdSxvNoh>#Mm3JR zw9|@dhJW`PeUsnvbfeetBl#~aqtW{!24A5#Qg9t2s4nabJ)6-E9<*t3x0zIjg}f2F zUXgV+vW4Urdjw*iWpxPeL#q-$_kO)li?k4M%E{{bKI2j!0ND@prB>9JXb{@`07>Ox61ofnHQuMn!ihs6 zf7sE3mc@|o6!X5$5K*Ya7rJ<;EU`zNOW3(L3Osm0$8U#0(f?b#47_OQPBXgAW$O(1 zqvPKwq^cmYuxsQg?l{sV;@x|jr9s(cE)NREK?fhG$T%Rz@y%+a@o@l)`y-7#)~jBc zh1Q^0rP#xs)mLGJ4AfL))qjTB`*eJ%8&kjiX#pPmmmG4>T#<#|a& zqN%&S1W3sI%r=F>qg2a}jkZJdSf9pjKLk9wmZffyLpehDwHI%pY`T5D{=u3(B=+Sq z!%nilYRm+?-wt!iQ)2{8IlNr%-PVZ>)6dX`BI2;^iyZ?h1JxvkgJ>W^BdlfuzKaPk zUB$PCv7Ba|R+&QvGHsblBTtAH8p2%t)s3sHrHG;%Eu1 z2Lc=K26!NmE3SyP?nq-8u27r>Mb+WG*uuWtAAa#PzL+1$(cy{47IHLe(Ok}(fpTVT zWE~Q_`mL_e_3u!MduW`z6|36-k(y})Uia^ ztCIlBXE1-q>xyCLUIMjUVCU^5p^>Bab&V5bNxq||RbIVEDPeU*6G51OdN2&ab^p(+ z76)cyd;NXWXeRegNF~GeQ7%w{B9U)CEf&EK)X&R%V0m~hbl?$$^(ow`6L?#GR@=^F zU)xr!*5RAEJE`=23P2TJ)vs}6B2EOex;lPP{!hactO4TLBeBwyXMe1Le3i$JTzIRQ z1Qbi~kWA+I%*I=da)w>Yj=okeyR&h)kEPxYVYq#shm?^~Z z1ShXg?R{(<^@F~K}%i*0i#@RBTG0F0U*w?}atJ3zBVvjRqs@6$~H zuC&0nu^~Y5VZY+w_&GCM`?Q?A@rC6!e5MYbXtN-69W#ght3pWzV|39bevScw68uQ6 zrHR~tA6K5ofYFT6oW(@)VY=qPU)0714T=Q`bK4^FQ0%Fr!%RfE zSyM;hXHvNZIElX})T1SZw3kFXy7nH#aXO`+TJ|R6=e1-y-I)EOwCto2gAnMc*lK=- zJz!Tpf9$`M#jIGcGl{-CFVm9%VY$6Y{5A|sF-dL|;hv13sF7ey+{<&XvQ#zryX{XD zN5KLHq&(Fja%F;Zw-c-*raECAVduWP|JExGF_p|8!I*8&9s4c*W~=9+5fd}7lkYq6 z@BT&|X4~gwZ4NxcEcKk+^8nc7XCBO)=mIxGP_yRE(LRtf0$F;tR5gduVYB#MK{Y%? 
z;n}$t@HXlFim7T16$SaJ>?3|BUYu7j4M&(e{h)u$9XsU)>0Tu*l$1hQ*W*oAV8?1X zejY7Yy!Gn}m-!|+k&(I%!V5CihkC>J?-HJ{^}A+najZOONOtY*rt}-L(S%pV&F|VP z??Hnp3MgN(-l{%L{%=u)UT5yK%6>J9U9hb0-F;PALNq&CaFJXN$PRp?o0h*UJ+Aqi zdg7{uoNte8Ia=arDqx&D-z8pe5l&*Ya^HwrXWhJjS5znMCDBn-Q_Z6h55k?{nB zEGqonKb;%!80eMO zW!aeFC1b|%ZB!oKE22b>CG6FPclYnqrjNpqaxckqzuB~a;-F+lxx{U&uw+q@ubfI& zc^~Z938$WZEbfEtlMFEkn&%&;g12rGu3xF6}ndsN{QT%7ozqa1j>#IK^ybEy`j2P~V zMKjS9TJA|!QFYoO6O%5CTTJGNzDa{0dg|2FIqMHY<#tewyoFfDeLV;+ju~In)GBJJ zcG*3XY8whr|DIN zumCYPe#no6er5gn?giWTdi7*GK%Usz^ij^IJ>6nVTAjUN^I#B00}PTM33hq64N*gY zL536`3}Z6z&K&_#&7hpY!?f#TsVV)w9y7>;2wiQA1WUKRlvlvb@I&L6`jLD>{M}a_ zR|?}_XZY2ByVNGYlv{2MgX)}uhJ$@NcM`SH*Hy7350q4#v(|&FTQvL~x?vy&Sbg(9 zi%b}C8@quZe92n`5)I}M7V1FZS{BX_TGr-a9#rNeAX!y^4a9Kb$NCd+ayjln@B_?$ zR@5fN9%i=*A~6@JyuA@PDIUcK?(66mCv}bwuaSSOx_kE~fPuiBF))E@7^#!^8_Ej& zDpe4Zti8@ahSCnFuWf#i><^nu-l54LlqRWltBZ(AW{M;&Nm$8`4F4JA-K%GYzhiaQ z8-%{LfjK;=jl=sA0X{a9%dg<4BN3GaK;%0keU>dbxas(;O1wptqPS+#&MQuA#uI55 zKQ)4dBddNFU$}U*#w|jpGu3A&wqhs!^Ml2fgR!V@d$o}4i}!z_Lkr%KW4IG-7kgB*AR zl^JpS1n~CmId(c!_D1h=gLo}m&YT?~CVe6I`2ayIzgQ)F=2yKIkYQ@S8=O!wJTCFDLrQ>KXt`Grzu+Yi4# zu%gR6VfRWL@k?@4UlRcSrg$((6a4)X)t`{56~2!I|D&cZ;|KMZU+cO2o8;5)2gL4{ z*glmVh+en<3HP$q^ztU*|E%4ecBd#g2~BG_b@{UtD(c_Vuq%lF*_0=}+SLWMf1^W| z78rGQ9W2=+?AcY9p#+cBPmMqbj=Y4he&irA>>q4KyKbR&!$-!mR&#)xtZgD z{l70rShoKlXSnL`8-noQkXMx#iAnO|#aapfGHUc!W!$I6&oF6|9jhO+VyGRh8ggu) zFE+WNj>xWSa#mcJ-qrQ#rw`%WY@-D~NCUe_>*{NU3S9W^Sm%lhpk^ue%IC%>(SA>y zZG$Q%#tvnFeQp6PP8_O=k6K&r`{fD&=4A*jvMkI_e)e#`@@k%cPtC+S^+ z_Vb_^9H=Wg)D><^d==I)`3v}TE4;2qZvsK!D1T$ zC|)NP7<>dspoGNsz6rWDPr8&eBmGN;-^=v1k+WD0rQ0Zcn93Up5nyQk7_&zV1+7oK zv6DZ&)l5oe)N7ez>yRFXKp4Czr(VV78L%w+yEdZXrrJ+E;MW?X@V(bwl)_~f^7ta{ z3wbYDB=CRjBv3}4V!}6z;rZ;c^|N(MyZVY@eJOm<(2dPZ`_YPi{p+l=SmFE(?bJQ2 z);F&pW#>w?4CqVzIq**^`8IND3INWy*~c*JK!8-+_^+_LjCV0_VByc!BARZ?w21y* zIm1;0-WPXsO5B2lIFgs(-fQT>>lvPwA&SC?84wtGja%c)U<^2}UlSZc;1pAYhj*0g z8wAiIwsy?V;guKLQ3B%eC3Y@bj7`u#v7?OR2~TTN6Bgbjp|aeV@_&loRGh|Cd+VCu zC{67_JR^Q>m;DObj+BiupXJ|@+~!yvoleF2w&mEWB9u0{cSwcKEUsRKhQp39qz+P7 z?cn+h4=@o*@&z_%-nMSuohp)S7BGI+zH{_T?u!Ag`D5sN!(<)SA4JkF6q}(UwIlDL zVXaS$hacLPBVU)oVi33h*!Pd zWHIA^V_T5Cq@E-T@dZV3nFit9kDNmC~rAd8sh?*f18Lfdr& z)E`$qf@W#j73MhtHQ*gUP&KuxwUz@KCWKK#8rOY;mG<@JiFki`-^iskCFL6(*sJ$3 z97%mhNE@f|(3`uBXoS(f`S%V>lV^N7zk@$ereKSz7Fe-i|jdHB0tjivW4(-kF;^&Z{^+6nvcNd6t_2#bu+A3eo zr%~z0@&3d=GZd9i(S-K;i0=#w>!v(NWSe$f=a3UkMMC*^4d32MK-)nS$o+=tQhaXB zFqlDxqw0GPv>p96k-!iT5BHTM^8u~VEWmfRT|8jrPMI?N$iaXDzzmuU>Pi_5D6O6& z-S(VS4254}@LDwQ$ech|p18)NO^g+*%Wd;ew|72k_{llgOzz}`T7i>6m4>Jq)k7_3 zyK-v`9L7oGEn1g*?|w}k30m%@fGn|9h8QVz6gZd-F_%7ad&SC2e-G%Q$5WR(`9!{L zuLxC3m?m*YVl4~aiu?4LmaCM9KBRIrBP>fjDwRH~W+<9GKOo!v*!T*JP?pIp>ctQm zoYf%w$nyZ(eSqR=^o$0Z)R#=%BanRSnSGiJ#!Q*)n1X+S4?jen+1wS)G;1I&WM7Ne zxC?sT>7;JOk`HlJFfH?D1`P1SSeSI`V~4|17sB`_NfDEMP1{#+V` z-?-Y7^a4K#hAg-$0jmX{tiiIsPob(!4`?`50#%0PhxO7ndkz}Py>AX26y7&Ze1YGk zY+ z{3-Et|4B>AtoZamR9shf5>7R=A{$#D8^^~%cFrctBn&>3mHGzh;*p_mLBq9PyakH} zPP{B3gB}HEDymA_r3WeO`913+d7+kVJ_XSudQ*PbivlvMN(KHE$<>rv z{n@7p`%IyFa@=PLSh|1l!J<5XT^yTAv#n$`fkkK5pmnpsmT767Y!8}lIkvnF`5<-W zU@UZ3=eTujE<(geu(4*`#w~HfsF(OD8WAPM26j+*q~F^}r|1BQGq*m0DAs2QKab@v z4=N>v9I{O6^LIlX5m+kps!>mJ5rojx^|j3UL-K9R_Jj;Sy*75Fp%fBC=Jo4OlAeQi zNRDa|&H&Q+b1EK*sCDHwrxkejNN9)aOh9gc-;zO5)g02mM&AwX?up`_YkN^qokor|iF@2%w0Xd^QK7SHPu=l8gnc1U z&VcX;tm<5MSE%%*qJt)CYr?;~$bF)Q>MqALjdr6G*>h;_Db{=$%6sw*dab-Ij5eYv9)B{SSOrOVQ9I|;dkNavyXq^EQ>nj^iwS(Vko`c zM*P{wky6Tx)Iox|Pjz##L8BST0~4Y_v8as4OR8^d9E#6k>Kf(L@$O~o#OI*xMba|< z*ab@%w4ZqKM9(3)Am@5`a4MNq24GM9?!C3@3EBecy0$+z1>fY|OIBYPKAz8Q%39xT 
z$0#fhj5Oc9d1qU{3j^lfYPOAs$(b^U`_Ril4odjx{AQBh4qUVyfxGNKejXz!!34SR z%xGq!@Bps=+E-YA~=}b>DH|shD9I{Kn+sW7AJC@@&q_}m77p`O1 zVq3zm?XQBe=lnTa4>Vv0Rr;2rm{q+;>OuX-XJXop>%v+3Y5rsD*-C_l> zS+k{1#gdhQOxXQ7sKj^-UBJj)x8A{^x~t4}4a!fbGd=oDd0_ec5eHljLEW?4!loBN zG?-YepTr14AqS|0ePOGFVZ_5|kgy6O7V!fltWVokKATa!@%^P=|&sZKoUqh2DoRjMwzxB&_JjqJG$;H zhh$HbFtQ*tbagN^#wEL=~^7A*h**-+i9T?r(|7|k7zd z)NNT>re42*y1a|^XU=lAR85u%7EI$B0|vXy<$y0eq17&rLOs*2Mzm z=T)58EMV5>WDwyaL~pHy;bqe=NZ!{LwMbD%HeRD&`7#b-C zyO8cud@g~0M?IG;c60ZTqKu3$PK8QY>UZsVcz(Llh@fDx7HM;8^c8g!Gt&Dp)+Ga#$6pHX{g zoob+a{IS7=#5$i;!TGP04=>hY&bPlB^v5b7vf`Z%&TCwIR(}jRVG&S1n=~f$DZyx? zLWx+cU`>nkBS7}{ACkJ^$AtnIy=(EQUWJ+Wuvh+zB)iH`K>DboRdKtulz#@}5fmLh za`{KncM+jv`taOG&LWIx5w1c?GO4nQ9 ziOL{>4w2o)Rb$k0{7@(2QFwY4RX4f$H5dg;-@2$Tg@=ImMa3SmXA?w?>QxrhztM~; z9j6r^W&M57dL*g*Ir}uL0VQJEgD+8oot8cU(U+R)L8H@lplM zb}6WL^TC5GcIIQF2Z8?f#G4hcK5$1`PXfu+v%ihwmLeU9>3mt%miQ^YGIK%rr$Fq% z@n;iG{`i8Wft5__jTk9|+zbc;>vHd$LOIXpNj75=Z)2<*3)vWq`SzFPunzmr21acb zL`J#EGDvy$mx2$lO{$(NZ2ONj4pkOlf?u{uKNj(3h~tnsu{j?Edq`sQy)uRw9xlcP z`)b>)y%m78=Hu`*kpL?edhFCvZ}gDt5K{8$b23M-;t`xtM$by}A8WZsO8$AIzEq6G zG(NAYI!QrijS6_|)1p2Mny;1=U!eUPO)j3WMfex)OUW)ws|Oy1jCZ!VUr(aetH$}w ztp*k?tjG}e3`W@f&44RX{aJFm2J7^IU=8vfe>yhTpg5XAVj-^Al!o$78Gw<+5{c=7kLf0Y8-l*Ty@OvWjUU7oHhzDIdNXy`i!!PWQzS z=5z$NdNapiZ&i6)bGB2o7~vdlA8?J~uygmYbEQWP|l?p7wS9{4zfWi_cE8>I@2@l*Qxzh+JSd%qWr!*nkEXr@_;dG_{J3BOB& zM(nfF;(&)n%es^KY3D!~gzm}8DK;0{vwS_XMQF#$wm#yH9LGW|9+*A6H+f_y2@7>! zAJH!YnkWGpL;Ftn-xn&>X2}WA+Ed)^xD>-e_>ZdMnA&6s!T^>=;ccm$5deD;VMp1v z;?Pi(6M&rXMNy@v%rgpz%ydEvO$dyq`ry|kG>XHBUYSPG$|c^xh$-~y#j#<7P)?@m zPt#sZ$#4r14q`oJJaCJx7C;vPVygbyUfE^|VO^j&)-diO{=}phq)Zo?RISHxn=hvCl8B+9(t|kHeb^!1=k@dMk|_U#e630FUm;o%~K4;>^09 z`!0DC@-O=#FxI!H7gN%!=)h$}jOMsS0>QO61=p*5q|^Nq`mpY*BZ()}{o-desBm|j zL9?{V0mKSWhw>rWWyHo1Ag2$cJMW#cl9m!PeqpwF+hLi;i(W>3X`x^LAWx36M9v2+=<(p;OV?XjCg=%|j;~*sh z$3WtpWdcEKHo|AG=!?XCuvp~RY@yVFVF(h-zG0tM3YYR*h`RP(MNz+h{)pjU^lo|@ zI<+#N#u5+kP=b!ZgkuMWC!aTMKe~x(F_2BYeNcma%d$Nqp!}bpOeH;2W)Oyk#FLfu z`o_DlR%-j3UsWD9Pf2a62hbzeMbqlOzURmfiNGl2pJ!&czOWtZpVs4*N~)Q>6=q9V zNJmWZH|{R}tKjqx0b*`Hd6=`sJ+4Ko;edJ#borYp5gXq&D6U1=MGk!Hf`s7h zIW_t{L}?0{7`Yg!zXfp**K}j+*PXA_7@tbroxab?T|6+%A@_1ChE4p-9mHdB`D{=* zGxd$SWs7}T1)7_s^x}wdJ3x8fA9FwQHNlRQL^=`#)(B0E_ z3{0B=8~baEFnIV+vo7&GD$^4U#y`JW3b}UB2Je3bqxtA9EG_U!WXnS24EMv2TufT% zKE}?@;U6rDW=P?$huDSfNfWxS9YY^47IbB;>$>xK<}ZJ-t~0gt^}i++Dwnm*>G!no=|XOUQicgFGN% z?v~u7Da=^6vPG0&0bfJA6m2fZLeyg|k_Dlh%6Y#Dd)#1HL`68}%qmgj=$E*I!u{#CVwT?FQ)6<;5OXf`&B^K4LVEX#K>I5w6=d#|KyuO_~s&Ep*x!c_o^xZ zT;q4)H(i z+wZFCa&>QM{a6}xU4Vwh8a$(2%x4~K1h{-CTl8Fv#8v-|zvA3www^)UgCei>bdf#G zgTf-U)~9RIyH$_1Jfr@LP=HR{5RA@s}NS zB0XbU&vWY9-gKILmqL}8fI1V!(!r@Q-h!Jzh*jW3w+;Uew?RGDl7o_A=N`i7p$wN} zBpOuq>$n2tdiY1c&fOf=rAXLLnX?5hyvNE%d2d}zKvX#KUx1iSD&{V)So{phDLAKz zV;2BEkyT9Fv=}1F2XsHff(DOq^j^-ST{>h z&#r}vu~)^HdLI6iC3$vE@g5Mx3XniZeWvbdgiz@a>%9-JS}5k`V`R$qi>I7Wlqkr? 
z7#R@L>V12M*l26@m{ zld!NpRg&F;A2p@nkFk$=QI28$qx_f9dL73`3>8zrld>~X!gKHX$bXKlu4hglJYt*3 zs|f9*1Fg=w>rZEC`~tQSk{@af#pw>zm>LSgdJ)`Y{;f$}^ROjR2d{|pR8@%yRp7QO z)HCp`nSXl<+Z;s+8l+cIY?FB9$ZQ^TRAK!Jqr`k4OYt6TM8(y=4-FNx0~L7P31#Fo z4(eTZ{bCz4VlE8bGgpxt`_O1YA$?RE#J?HxpaezP@aeOQ+rChvHzn}z9j#NgQ^ZMF z$rR`IT+cf4y7C-d{!KyN2}qN>r}?U+2vdz6#f-`gbk}9-r$gjH4J>uNy(Nn_ftmOc zwc=jVyxh+?Gzwen?wiZ~Z)pP(+@XWqd?<)^m8^P`?4vj!T;}**ZR}*bv;Y@K+=s;# z5{!Y+#O@LGXd{5(WA-R#a(sz}Oy!3?gE)5`AW=ZU7M>~bTlS7+-H`ni(n}#kV^tS0 zMZQyUve|t`MGL4y<|YgUZrMMRP#<)l;%QpQL7n;3vY@JEW4Sfl-Ym1i(5Az^DOU>m z%D2$4S)^7mOBR|{Rp0I}`ExV;2%@_S*Ko=PP zl}MD~u9sL77Q4X*zHg*AFn9Rnu*iuUkYf38?;!A%DA!lfxZSP`^AZCaCMrNug2}8n zE!U`8uYw#ne!m8T<=Gd}IJt?wA~tqlk<@@3_iBLmRM-CF=qH|3m>o9mWyb=C_)ZVL zrwFBGUbqd5W~}Wu+QR6pU(t#UbQU=~Q#mMj3lufyHc0%CL|nGO zq9?NzTaQ0CPmgyba4P?vGI|38@t6Ch<&JCQ&DEhob$>Tbs4~9PMq$jj{7H=C5Q21r zccrv>a6^~LWpPwBU-$w;PkafcLF_0lLm@tU*Ht>McXc+}I|_W940yXatyq+p{q6;N z`c*zNCX`??pEjrBau4*cBZkVu090&(GtINVrgNKRLji=@feWeNrfhY)^2)7aYRbJ>sbvN(@!fF@W0}p zwbNm0H3r}$g0I*|; zpKtVFe&%29nchQ%WfTL+-=*rSmi*kmp-X}ZuSaD6p)PQ0*`9+PPb+EtrZ(`oQjDL# zszeO^-+KvFqr0!qHv>1YUn3!A4bH;31XIasuOQ5gtP0M{;&OX-j-HFK9lza1J@@r6 z*0x>lNZnRkh#f#cj?XRqy2P>-M|OYGnUJOH`xTn;hw-WhON!e98OS8pdRC2{2ar|} zp~-+<##D;*ZVcx0w zH6l=ywR$wxwkCB;;8cSKOUDJ>Tw3*GXzr&LWZ%>u!VA&*BxxW4yz}(%nE6{YGM3n% zF_$zXMScSi;4lZcx_;i()L#^<@g+~*Euf?zX_zSFY460J)Z&uY%b3wrsRnxFdQq^& z#cffn__OoxuHr;z?yX3*BOk7N@^tCgfj0uR83Ih&?stSA>d-?GXaIxJZfUvcy zSEZKPqJXF9=6#}w+UQv*t$zXZ_$$dsaVjoXVuGUsr-!>9+*b=Eud>~O1|I{87!t)-6aqaoOfuYb`i zQ1)qAf5IK0bduQFKf#LU!{vgP0M+i17}QvZa!P%c5?>;Mc{Ekcx4XsItodZa!b{qnJoSVK2rge@dT)d!yMwE^_@f_pac5+ctL7m|1nh`!KV)&?M&HCAyBF*2DG z5HL;Wn}+MZh14aTDMD64h&}vwy~JOwFP1ndt`2Wx>L@-%O3zz0`cis_is83MW$l(z zPN(Js+&MATtv0fAc`8%?E2$zIA=cRe>KfE(bEPN64ySVnx3t#}$&BO6mY7I=72f2D zIJ8ws<2e83?Xb`ffRJY`&&VXWon)vCqHXhEoqi`Tf)*fCH@e@%{w0+_iSOfWBW!Gm zhzQF%i=ocRS;A?p$WZ*ZGC(Nbo*cvs_@f6ancG}$ZKC`kbFcr_qeyb90X4GV6#rvI zQQ}SV{~gc-ue!yS;0Dh5?>iWQZgZ-QaGR1P#ZZ|q<+XPSG9YV$y2N!ojj@D$3*4o2)!NjImOjV@9Cfq_^T=M3189*}~5cTp5> zUXWn^BvYrnb7NlMGS^RP z+kD8z)lJ^U%nm|A&(dsH{+2&RtQOoi*Jw|I7Tt}3MTA03y!PYShX$}#3=E}ucrS=0J>=uMvD7Vne)sM!y}Lm^%~{(<#;?&1c{!+5jHn>Nf0gQZ|9G- zccSuWSFE%aUNW*ewHvl3v+o?B!RI0ykG}o#(o8wuM_+~|oq3A#GGc&r9SDs*7tb!Q zE>{Z@NukMgQD`vActc@Q4+XWP;Do3hT9PjEtF?;EG5WSJhK4=b!d4?Sj5@DQPMwG7 zbiNl8OM2X8fUV|O^G!^`tUG&i)&CeBjpd z?SV{DxB(JOFMb$*^C}eVO_kMevvUxu`c&eq2JiN|v)|Ev?U%qv#JDXA-cGa!cD8WT zkPb1Y$hUD-sDrSJAkH}-fWj->3oRjx`I;o@7a~ktH;x!&*KN2BX=*e2N0N}i=pJN- zaaj;~7kDP=!^2W1Ez_gUR$P(fBq*_#V{5ow^yf;TUc1=UTV3J*4PwQ?x!s*`6>qK9 z+zk9(l7|n1L3gp+a%#~GLut;Ux9(^jLOtV`r%P-rFTifuvyZMqkH%0xkBJ9R9ploC z-nPy_!f=NDq>iO&!mG`@4YThL=uEOc!-29Xe9?w325vjcJt)CC_9NLH7HZEN6r5pa z0;xO2GQHlNG&N;x#5Q++{n!^psT<%j?8hxuo`MUOIcTnbULH}+{t@zVhWSpoSH8Ro z@x#-DYSV}|wR;G*r}BgL>5u7`P&I`;tCp9VI?x{eC!XnF zssJ{7&B}sRP$Q#vD+_;$stHVnB)atyA|Xzb4Y0KZaPjE4oP;UIk_Iz2m@@b1Smmz&J^c=laejsPKH}$3-+AD~>1`8$$(}UG1Z3N%I^w_pbH~*RN)C7mC3tCB9zeeoMAp%%G&*l8WMv0aw&1x(8|2Bd z56QPD>h3oH2)DnMXO4?;?Z95y_G7>T0@cY zLPl~KqP1GsQK;@YyVtUqz#xX$M>SB+Y(>OlxV~h;s(iu~YE8h>q?Ndr<`u`f@%bO6 zb%?q>y>AXo8`ppZ`5dQn6S@5i z!-YDD;Q&CNiuGZ;f8#hrDCx?_hyuZg;{L1+O}`$i8v-_wr~vLjL8ldjG{n{c*>;Pj ziwb3LljBcjsV})qyDSqlX>7~?^Y;XZ8RqJ8x51;^M1?-9@ln`@5*F7oo1oOz`E*zmt{!2%UT8Rsfa{gjm zzJHOl3*2nt13m)pi`BhE@T5*mIbS1`??1kq{{4__Pr(W;pdKE-Qs#);l_a*1{pT_F&nPfD ziNAw%m!%#Heho+V6*wDQlK@6?PP}h8j6Y@K3zLGWx3r{E_O_U4mDKS=a>lZ>vg7+9 zMbrwfgSf>=-`-wM5y)AB>pVz8G8@W%1`#GHe6igZhY9C)1;V>qF6?031?_jE`yF`tzFEcqbi3 z|7k>U#mM@JJXCC>aa8J29G%VwF+VUj2%$P0-?BZ*Z+V9aMU%%@1$c@4jdKqhOfRv> zdnO(>Z$5gX6IXDXvOei91tc44xbPD?_cM$kL5}Kg;KSa77u~GQ0@(A7mdp4@(;Gs= 
z%}b!`T5?uIG8Er0V-j$R#?9Yfg|y-L64vY?jIET|q4ZrqcSF)6!cX8U5>8^3(iS2< zYF})3#ITuQE1R`idVhZR12aGk{vGv0kc|Gw=N&Oi=uTj6ZYG+kkrhCo=^FO9$g-_% zRsw;EEgGQ>A$twATK4^BrpN3fx>=D096xShb0UN*59S09a!aqGqzO@;7>MY}2}`B(9s%mFHf>=v{s0u};eAR93I99oK3|&~ z(JtS=9)UfUs2^Uu#f2tIv)tdt!}N{}FV7L@@&NuHgDDeH(Yw?WUD5taApJPp#l8SI zVEj)RlUXQ5%kDVSCM;ROd9`ILZOG=uP1&9!;9Ivb4RH4p#aGb@e3YTZN5hz{Uoju+ z)v#qy=ik?x_+^066u@u^dqfZ1n7nUDE~adX0;;}+Lx?r9gJf2U!>A)Z?3!VDQP>kxiDQ0#m#IPB503pf@-hGbbN>} zUL>~OnBGp5kAEa1H3N`0`>RH6%zB6w_rH@Q^+@ti$kki#iVVpI%24@T9dZAqOa^cp za#iHBMt0yOxzXQV`e7cfL}e%GN0yx7XRy3eVW`yEIm^+3 z7?5$hacoO`J~~`k*wlhBIj|q-TX0bjH**3Xm^&4$dU)|F^W&{|v?1CGWHSyMtlG-2 zku+dOfaomvNExdz9p1;#DVJy5wvP-%G{nO~R{chNrXlDbyJvu}X_LiaacwUDV%ob* z-MW{Zk1-SFNwT+qoG-hP|M z31Zt)%>FWBh*gd%?!M8#^0cOLXgp)Ble&#BXi?TC$#2N@04y4-I8DDA48{6X=a0eFEjJ+3Y9I&v%58%@Lg*s&k^ za61{par`?SUS7vl_L=RyrDW4u`D0_iz+H}V1@1k!h^GrUZFH}jq+#8Cd zCF-j}p?t?fBXG4$c}S+$xa5UD4Ah~fSi4kD7N#Pnc3U07LWk0S_X1mm2}A1jWh zb;ovBiU{RF)tVLy7unHgaHtdslQ6}6kX0Py2Zhim=PaCN;qDelrFzIhAq^4EphSv& z7Qc&l$4V{uF6`d4%7BCM5E*vQr%13TWK9(nBS=%Uo@F*IuVXZyt4jwEq6_n;1q9Jp zi~rN#-(9rhZI&RE*2-I~kq1E`$;M#*PM}qQlt6HH;hVT$0*WRX<47OH!~a6cjGa{4 zF{428!8lyz6zv_UESMjKB7DVLm)RWw@Z^9LeENMYqgHX>_z3F5$?RkO{)h-=c*h?a zzhb~Ty6!B)6Sa`?|DU9<4{P#H)BS}YFIY^pq}#+v0xhh8&Q2A=HBEp((q1KimH>A5 z+I3i@g;T)OT0-~?1%heo4kryCp<{Jph!Y|>d;I7WD^5kAEd<&FL12q5D5*&eh=@o) zMR3mjcK?~H*NBq5AJ6Z3?&tm(Rm`W-DS`>!@g}IlpXUKS*;RwF%%An+!p&oifXxiW z5>6hpbhTF}+cSv#p{+3ouO%1mQ5qb1Q&$UVm4EJS0A*FIQjCF+UC8GCU4hA=PN-SxPRgO!j@k3#GaTvBP9FhcepKs!{H;f(S? zoVx~gdX&U*5O6Eo@zW8pGe8$|-$u)gNohF7V)ShFgTyZJftf*IXPzv%O98o8Tdrgo z3R>zIat2ezN^CtkjV97^+*NSll#cd64}MP)W_)Bb6mUUd%YDlExzP!n=;nXv;`V=< zDk0tre(+*^Ad7V5IX=h2B6Id)Bq_@V!H=#~UlOKEX#tJVJ=+s;Qn!d%iTn3M&%uSBTuIkl4T&QUEc=jtH4hw8{E zWXK+EoJrw@Pl+?4vxYu$btE$EehGzzV*lVCSy4JR zXnqpBNTcjqg~UHsZ>+g&4F=sfkP=8onB2RCB(sGKaQivrGrM~XR3&#ag3)?NDAY1+ z;J1O&TPMwW~*FxZc> zYD3ZXB=FyEy+3;vd!j|lq?)dwgE4a3nS1C4=f@5C7?55Ya+TZK;Mq>9DE*>h6{F`4 z{bFkKSIyA3Wi`d)y%n35U{BfhHGc7~JylRI2Lz~oPLT`;==}B+OYLq@JE9lb4o6}= zL+YB-w7I`Jt0^C48l{3a2y)1kIkNy&BiEKdWtl7=LAnMq8MJIJ2s}2JLIM6~XOC6( zp^Z;^i6L0Wwso6;LjT5moCz>lqhHS|b)ZDQq|AK0oa*3!N+H;sLnlCrB?EYvum);w zJA~=w0!uj#(dj5~;_oc3RZbboCcpYb`rpasAw(^pRb%}mY6=;3FDahB0oS02+sA~DX==iJ zx-uI8YlUd`B5w}WCEPSKwnyy5qii<2nd2siQ1#}vv!Yrtko%ly^SBUG&O#c^11(?J zYb?KC1%Dq(^|WWVu!{u9G^3j2XEalwZUK&)#at^`R{F2f;6tP1B9l8ngce^O-~^dU zVukxO(`bGgTY#;fWzyD>gDR$;s9zXio)TdB@XV2TI5Y2w2c>{m;!5EkY6SiSC^xD) zxc1+y{^%VuX);H!NB4#RBfpblbvHMoq^!Ek@0jrS2lptzc4x|8d-Q4%dsB4;wnKO8-5!SDRdAbD}E&LdcR%bLc}l$K`795Yi`T$?_(9h{eFi5 zsd=+T%v?3U)(BZO*11>C3qIijm4}P9VtN`j=|Hg3)1xYXsv1W7Uv0?Gh!bK(CGY3C z3M^Fvu`6=`9<$NtS&GyqTGgJOro;g_XWOEW?-{RmcAyr1?hw@59s7syxSU+ScBBR*H0Humnj|Tb zLNotts^oE_>VWz?a{;WA)8+l^*ZTQ*q0I$W`@+Nt@=CFIGP5nWko=t#@YgdZUCS0c z)#8!ed1fQct9qp3d5Be)h}TpfEO_iQcP)|>SoeHj6YftjhFizKG&s-0@>$sDnn{R zyP(ych>tZ+ir|PB}31WlueuK`PD?>$$7|W-_e*eNltEiIT~YN zUfThMsVR~;_-nRv3_K3;nP31s%Mg4*(Rv!jB38vPTI>0Q2)-Hz@M3_pup+z@({J9) z!*)R&IALGLWj%DMtATwqLxLS5s5w*BH-tU|e6liN z0+JC9k}gBPGh&;_Z#ejUXgc&eX0=jFV%QO-b~46%<7FU(uT8#VoFuoz@T(bk!J(?f zg9Yka1ies!IkcitA!oTeRQrE0NwHEpSCr~6{MK?r7p$M~GydMlLdUhMrU-W}(Y!P9 zI1eRBGd~!)*gzx${&_F{PvA#%GxQv!SQd#^YKu%$KXq}J1haKLIERaaK{yKn1@%_! 
zfbXc&D86Q{kkN55k`HjF=W)cf2a7|ll@mz2nedo$GhR5`6Pp}XsoijUf~B98`ve8gE#{{r0r_8 zIcs$|yaNF>i#}UZPFLY42cwUulS{zK z?1p%z_P;Y*D*FU;)BWEyBaDlP%40N*V_BuLbh&HHZ`oM67OT@3`1N=-RlnTSRmP_d z^T!x{+{4H1Px_kTK<5bC@vv^^K02>BhcPH=clq2vYMvSH!!CI@*hOP-(vtL~A=?=T8UjPGFbZT&fA58j$OJDmD9LQ;u6jcrQ`ikN0@a~F zFX{&of|U9In;_AxExU0dbxenCO`nA2U`E`EJRsoa(Im9O>LYkbwu=>xpKu)ZISvq3^pYhxa;Ojec8JyaOIrLgc0$qPI!wp;?>V&l&0av`1+vb;;(#Quz#236 z;&*(Q*Xix06T5FAlll3ZjcBtL7L*6km_A%k?nlQosDu*=3vgkXRR~4{6&r%%*Oj|d z0X7KJHvhuTN?%jfN0&T+Vudz}x)xqyzfFOJvt1~8@ z1;k~7wa;3Bp{axoAHK?w7alq>IKwWeN7Ij|+T|&jI3Y$^4!>HY1Z5>az1r?&!Y@Z7 z2LgNWgKKSXq0V2sj?Q0LZhX@F&P|U`hHTGQT=`V~)Z0?(L^cSW6oVM_G7U$m-M9Orko)jV0FQNf$WpA}$Z~YvsZ5Bl2*6%(@bv z{a~w)-mFXGUu~H^kiCX=Rlg|_c8g=(>hu^bJDf{@2^nQs|!D$t}t{?VJbBYPpEqTUnZ*0xo6@vjgPZF@F{b_93I>ykOtyb^3 zaujf;q>meW+*>??5R0Bj+#0nBS}8WN77f)8f`#@F+D;`Fzx75maS*_@>uQbB!7>k7&(M0K`U9p4F>Cwx{0_d1U(2p);;%!qM^|mjbUfde&0JUA%+HUa?;ctF28TGgU@#7vitN?LI#xNj87%0%c z)(RX#gsf*Fq6Dq`svWYr;N_{afZ1UlhTWVW<3)cqD~rK$SyrWF$ozRC3h z`|f>ibd)@9U*e!nF4KYYVEOTu5y?di2N19)`!%K!LYjwMfVA9xI#z4%H--$x0CPbv zCyNX7sZ+50zbt}(^J3%oXzyktgi+}!{uWCYDIeZAQ~llx8ho2O#nEHeEK*dsN#S+0 zg<6yE_D6S;QGC+_q|#Mu9{e&wD|j2?6qXXmAIWUn*X%qSdJp><$*7{wTJv=cG^)%a zl@25{x?eAB;p0P)@4N4l$v+CLV^C^kXf+UY&B1bR9k|`#7u;vZ;>{Mq| z?fcUPJcr5h-ECYq7HD$3>S%SF1H`-n4H1dpx+}lF`cfdCTPXVa)G;vEL5>}y8t z%5(r0q#IET-W~NUY)%taIrkXuCY(vH*k4Wr753^(^~3h3LKUuQrxW1Qc_=)1Qrmx_ zG7Sx?WUn*N!&gN;sU3fa^E9_uHYdGP4Hm2#A(m2pDPWfV-`qKccetzV8?++Enm3p|gn22urjW;7 z!kut37x35pJbYp66@v!GMT$mss+?l>patUGkB6u>E|_jALJugg-qNzZHk|A!j$|k^ z#Pwq*4oh$#ZY0xw8j~P`BCW)kU}-IdW1h!-0`DWd1A$<3E`MKutxlmpjMcfb=Vh@j z4#R@UP*9%==(MB|;U~`V*2Y>}LWbY=NQwe!c3uIvrTlJ7sx)>6tT+u^*Ukh4`A{%G zwfPlm=LZ8D%8Ss^&}q4dW7c4g9_iqDGKUU>k;Dy`MQKcr%Av{8p8%yEPU4b9+Tmy% zE^pDOafvs2qDr()Z1{brsZHQ(v*g3F!bJ-toYW=mdZy`a2^!#J2He`h*61-v%Cje) z1QR!o{fqMaE*(p3eg~kvh2a!LrEnL}cS(H0uK%&kb@1#mjRUAwejaarF*%)8NZ!qN zM<7R8J_^{*ty6Jy1=tyK%6YMH6OY-0UyS-M8I-JN0w`senWGA82vCOVYt#7OlIV?2 zXC`p=EkgaNwRR89wCcSG8~z;jg?1)D;_1n(joErDQ#wE1#AoWASWX4ASTwzzA%oBt zNN6^EHqh2ftigGJTLu&FhdsI0q-EXcWUxi+8BioC59HAL?6R<(2zlhJ3|skahX_Y2 ztX8MQ6{A_syLl|UDII4KxwBl0x>j?D^)BmOu)3=`1e+42O_j)p)CEjHcY@ zyLp4Kjnh>Ej36r(*ecZ0@~*G`h_H!6LEIr6s66kbTTuZD9HPuw%dZ(t&;4=R#|xIh zkAJSduN@=Ro)w>Mmeviu_2%D6_2&cprl44o8*)&~C>{B;kG`)yWM__mOU|==>$EVj{u_t(D)$&P*rBtupMU|DuC4M0h~sEuU}( zFPY(?&;GMYo&d}Gj%Gn`YRf~|%88Pnp>o!4J*^V#2H{gxiX z=BOuWH+S(UH=otrF6{QeC~UtY=gN42;19G=Ehb=(N}D5j+`YzuozC(Ur<-q~5?I*d z_iG9uTAG^Q6&bi&7vq-!=K zA)LQ$&=}E08LvS4+{+XKk@?##L-{RW)f@4dD7N^aq*0y3MY9WMYD=w_DiIFsuzE8; z)6@muRm9cy2*k7(z0`KTIPuTAU~PuRVJD#j4=fw}tfw+q&IC=L*Ty|W(&~~i(v4qX zJLQhO`LXVK%Z(z4eqVEHrSzc|=Yz)?9*G?W=EP(s9UmPf3&5Vrf?pjRdY0LN)sJS>>M{$}LhHo|juFfEbu6QJzL zx))M`r1>?1=yiExK7Iw7i!~Qaayl_lJTDkZL~KI>8l8f_lmC7lL*&`8yNG3pFq?NW zK)%psne!%qM8P!>{IJUDsS^sLk6ZzrM(TyZB0d5>-oAnd?Sos$OXh+Tbb?(P5V8-- ztZrbyTQi-|!TXiPLiX2i@BrDT-I+b+$Io<j7FS9647d!1xs2!}s6^x&w@6C}OdO z&!}03(eAd7a^?2|qV^z58#@eI0Jr9N7XYF9PTf}VQ(#^vvZ9CkakMNqe%F*iqUwZb z4yPNEp|+|$9B4*P_gcHa2*m7}0p4AV?_%TY0u5X^NcVbHaU8b$ zIasDH{A7IG2guIuVp@~{MGseN%WV-);QGkkS7w{XMm~B0ot)t*-IJ$AWF&S2#>wl6 zdi+^4W@4yTe@->{a2oH9J^nD3{5+`nYML_(+?mm&kIPzrQ_7Bo|dzQy+LerLBeV58f8US1^kux5Zq9{jYVZqQI*0yCY;k*~$tOa=^? 
zp0(S?YJM(lp-VJhHP#|Rbh7M4$QBrXO;)N@X}=-&(<#uv*g2GJB}HtMqCWFYOO6A3 zlo87ds9_Y>Wi0I9KRp4oq9i7T+rBUXH|0{%g~by&6pT6~-e3B$1PspgoQHlUbAJ5D zS0zF8L-ykr#%`NU$EM}{)2jn7ND}yJya^o_e1bD_iBo9Ki-n9$e}}!xBo$Nv+jIOp znK>boEXY|o-=%=u874Zshv1CZF?j^611FBg61Lz>%5OeqGm-^cshkc~LWGNJd zcRs}99G=)a+PWHn>ctqw{@v_tzJr&-vM0T^D^EmAbf(VCFQrvUBpmR6FWZ2stcH57 z4UiXC3Gz8tD)18)3CEfnTn0biz$>h7fc{(ASRO>Ds6;4YANWx{LQg($CfIUjKRu%J zOBEh5Zq6~3Q-+lW)dy^qmc$o~>*paEBuXik#fg_U&^~BZZO|8!^}3e=*owcCAFPD2 z(3tkj|C0OZw%fNHJSh)nEz0;dW7tz+HzLb~M<5vXz5)jqp3{V8(uqN;bGmb#kd-f+m1Rm=uT z0HBBBw$C?a0!A_(w`1)@9f(t(fO&f0`c8HS8fcbe9oF^-h`0q1Xr>A6Xxpc|O9JSv z4XrmRb-t#Yd{D@UH)K8nz{5QM!LssQ4Ums;L6HPt3&+PF2i*6(LAx_ujEyJ9dOx}H zX%{J&YW}H-h4?Mxu#aGA!g*qeTr$6nG6lynMqIN31^ng8gj2(Q6;D2jNp&>ci%N+_6z=s6GYPg;wKK~ zG%&Q7K;HltgKQ;nZQa+HMzU&(qV?FqZ7q(^Aou8XIx}pnYL4DTGw*K{#h1XC>R{d1 zpZ?*0`vt%`8OJ*qgkj4DFx+2GuA(?MuevgrUo;!PHeJTQ+ILRsUjq{wY%$*Rpn=eR z(t~<>1q2#SnJlIzlH5U6oU0xa_hX6T3T6yMdUCxOFbplq%;c?}SN7rh)IFcj_>1!J zL1XtrEAh- zG^+k&8e__zac1^8dF@>0msmFP5;r^DRG1gRLfgiEuYfaXs<8^O^;;0Bv!0RSji5Pe zoE;B8PjyBxAQ;%wEqF71mKEf{($Ub?256g2tMRg=(F=V{Tr73uvwHRLkpBmS4 z+i0tqwT*~!91Z&)_?U@U>OC6+AFy0(M)>?ErPdWlm-={j>e72;M_&!yPMq-4|XO4v)qOvhY zk`$pn^dx>8&rM-7C+wJ9$3(Wc7n?#JGHP0(i++@$%IP9%L*9vab91viXka(bN%ww; zX$e6k9xqR*MU5y*20i$RgBfw#*iNG|uTnn)m)Nr4MB*XZ{!6h|mX9{ATFo+>K8`3r zM>>0W+wOgM4RsO@ZDf&)|F2_$kN>!4lC#2Gd}CA$7{187leRBm73EF*jFWG2(v{M> zI-k5(SjbSDR_9Zf=rhKfVEUoHc@hN_?gBlp?|!=kCfX$oo^=I_*?r((fN(hJ${e~2 z=D~WFo93=-`fLUt`uEK(98iSbsb9c#S~1!LMM1x#PwN`WoeY~^#uP#SN%QF8tK*eN zlf?BPl&X;vLWm&^VtoK~V9!?0O! zMg)gpzY-SENHVG!jC4R|n;mlU$N715<#kOQf->6^Rc(m}`Qe2-f%Z|Zfr+culwSNi zWwo7yXF7FIy`=b^v75SJW&CmJDJPG*5DtvrqRnzMFX0s9`z1@-@BA6H%JSKrs49;e zB)wtT(OC0}MTQlVYsdBKYj^2f!@xnM`vil@KLVSTisg})YyRjB$Mrd{cr+%Szy|WU z;y}r0jMR3dCs~2~;OBe&kEr$!ZiyvIE;PPb0e9Wzg6346BD@o`+cRyN)g{Hx} zKFk9lT#?`-e^Q*1gn{Sea--G0WTe1b;2eG|^^O~lT6&QKsJZUBW&#>WDW<_>K_eDq zZXz2&Bg}2IK%aFbPtduzaTKH0f5et~!*M+AH&kcL6eljxZalvB1TQ=zfd2I*Jk8D= zW=bSLRJTiqLPw2P7zVscM&)T)6qT;007=(udAS%Xw5&@9-`A3TA!Frs05e~kLO+6E zT*CfVrun(-;1zuAmh>t(gNV=_1f&f3uZ)##L^2Y)twM7!SXIoK6)ei&?!-{cJGIe| zK?Lw;Utv%yHs0J-`lC>{)sBlIh=zX4O5wZKGfK2;E{w|ttg8T~bCGql0$dy^LDLxi zc4>q88I>X+>wfTZy#ZTD_2hC|2|EFExGFtzm|qK?ot+!OMjtk(%^Z}v;Ae={QlgL4 zFl?7VUtqv`b`mbPNV5RbgGiVc1z@hjAt)`G6|;>nO`~eqb2`$Ypi;|`hBrw*b6V|l zTS#cdO~Vt~w}v^*2{BnNdt;FWI$lho*4nE!zbB@Kdc4Y=g*#2thm5%%XkOn#Wl^5^Tn`FdYJ)LTc|9fG@Fxza#AF zLC0XjW6dyj)iMDmNk9gDHim%h`Dv6)g28eZwpQ3bl&>y0aw74Eq$HV<_}Nl2i_IY+ zAA@e@$rAx9u+6A(7)F>|5>Gb)3)qDa>(%vfy&e^txFtiOpN7&=f>Ie`H<2ZZq5}8R z9#D}2qeqy{5T!|ZR*mBT`F-=G#xS-AvzLEAC5x(bNbwIyWJxcPb!;XXcP|gw60O<{ zg3RoIXhc~vJMUClcarBpCQlPj0T=QOwMU?3yrbf1@N}lzINexwuKu>aU8pOwZu#9@ zbbSL_;K0g+my-^_ikV3-MU)uu#M7t|7=;TG(5sdNRD&D<84;06liFenenGy%7Ef?p zoOqC`og^jc0jeKNp`BJHPt@_lrj6~!igL7dBwuMg&Y>cwZybkD*Jh^CW6KAyR<3|*I{`d$- z5D}RtC}u?xuY`wl``Q0$LYDiO-t!(GslKp&tx3-F zAaB+$J-V$UVe6|^-cSgkPPXr8s^Z077M+TCIBX4Y)%$$0Zl?(I6ZOkrknHm52dSdA zOgRhgWsdsM=v3q7EZG4bpeN5mL$D`IU^j|o54S;f2yCwCat6@bW{&(Z!-u3 z?miJ*7%Sn;y2W`pwjd1XiNVsn2&Ogu%c9;ZC$8LEM#a5t#NXrHWlDhQn%V@cJPs8! 
z|ADIR02@0c}GDEM^&V z0J2iuh@M9MsBRq9rFi&&_`mvrCQzy;vnm|0L_3h#@4u5Eo-K>fW{1?!T7VV%ohtW< z*AVFo9&B269;~isH{J-tN79(HHUY#0il<{sV+r3q9g9X$7?))U;)By)W>0uFk|A}i zjO-2*Ucqa8+!0pWD%4vC2vYzVY;+qE)J^knB0e9W3t1o6$?$>)hPezo(`_&cp=xQi z9b^5;7=?e^Ybm7F>r2Wo51?Q;ef1uLr3=!jelqo9OZS6e4ITBWo1?~cjyCmeL?w2S ztKFRA5i5A2+s?fac1MPVrfL!bm@dLnNu3w@Eg+z{iju3BK80s_y{o3y`0m7CqBX_h_8tx%dqW=USa6lUV~Y~pLM~tZ&m=f zvz?BS#1iIGa$+_Zrdwr?a2gS11XRaKs89`(MRZuzzTqX5Y>I;2IxMukNk)48CAHoc zPjo_`^(^Ct(@xw{0&6bhHl@d{q|!pG=dTz11ovz7cS3!OSkae&!poxetyv3@nujA- zbs?<6x;qCxK8!JlvsV)*$j7v^Y87l?=j#$sKd{5-Pa!67!sP z46~gB%doQHjrnhkGtlDy;~yx_@$gf-bR0%g9DDyyChD*fs9MzMO}6Z@HR>Hlh76dl zMe3d_s_I*t!!*cNskDqiDQ)@Ue5swCez|>69{hVW42;Q-kl+*yf-i&TJ}r&K=Ak66 zRIr8n_;KGVLHpDJBex-@t_II5Mcw9SyfMGjXqV0Ra;epAUr11F0oc1RpE-Z1dr1%brx) zD8psC6PMYdnG#(En=TcZ)(7^PaGS3PRr_aSh1K2w-L~%n_OQ7lFDv@{g&BoS(+=8y6%5dN1}f2MxO!tUSr2u!rkaxYQ|m+oOd*xYiqWN zj>J}CIX(i!YrjP?oOEE{vICHbtoNnpX)_wYvQ--m(a%eV0CH150C;Czwl7Wi;e?&+ zhN2{-K@8$c6yD}QZPo*Zm5G?!xk9G#5g6p_~2vsl`4qcnGFTZv!Q4NJ{jf*RlD6>WLD0Nb}GWoxm|MxP*iD(7Mle+ltsmhzO< z{iS*qoV1;)Z_xE4C3*jmxXmqixxDLdH=?=blvI1h^WaGXU!lqbjpDcFl?JkZrnp+K zkTR-p6u{bkhhCWj*Pnc!B(ck^ORt3F>Y3RKBVq5l6W)Q0E+%jway4NMO@eq5EK(=| z@ZxwNB@imPxUKt#Q1~C-Mk(D|_2=dg));5};Tg4;Hd%sI2%mw_Q6@jd8fduH<{_iL>PVc?o?S{YV~*~gWSRX@;sn(#6*ESMMHNmgIaV0Qn@^tlC*?nme)*v*?AH(SeAAHw1CJEOTx`##VS5?3(ZN-u~RaUsKYz&k<+ zUBkxLU3pU+a#Cxw_;%dE2T(VDus+cfB&$J|TRft=Q)TMAkwnzo+;|qQE3l<2E>gV; z(|?{IDkcU)A*Wb7CRuDj*Bob@1XhsK^VcF7I5reNw_(5WKhbk=@ z`iWRwQ*u1YMZ&cS3#N30>4qD;xKfvo)B{oyxBPzDcod7+uWlPp(lXAOa`2U4G4Zdg zNARR7#lM_`NVJ#Y(Ruc~FmJ(a(wJbP-~|A0lTAJ^32hdMM|P z+PXCMzGcHxfF;iP5fR7^l0|@99|9Ww;aIN#LfM9RESs*>TPAC&&`>Voy*iF*E)n+6fw{Cg*CYzS9A*rs=Edu;^k!l7$7}ECjF(x0R-|dLw zegg0&*Vty?oy|hNXTQ|Jfzd37RC;Bd10-^IzNO|2m5q}0Tv$^zpP$uK1;m#T4g2}M zFn5JK*;T%za7!h)h20}j{svg=keHymMRIRKKI)wCvHNh8%fBr|c>M-hs*+CqqBL(AC{+X;Ub0L(M5ht>etmaKJbFRntmEUVhQ zV?M3g3))QBYJWnW)M_YCyt0$9FWo6pPpv@D6Bz_gX7j3l0e}whKTUU&BN6-oaPxkp z1Q}>?tSw}ByxL;Op^DfRP$U@M%v-qY4cp1y*u< zzsLwy&DQ5-j`Igzn+742p9km9aiuI*yQ(L~Gm5Cl~M(G%2sH6~90e*rb6 zR(owopk0wO_M1aSnEw104DF6Y{aNZ9eJ7Zmp0@9dc;lOoNY=0EH??R)_Z~?V0GLob zZq!QWXu+}+9%QYh1VUf&LIbVtWVt^V%n6T609Y!+SFAoO|Q1Yay+ zC8A@FL=FLK?Fkb;!WB0>Z11D}(L&Rb{t2hx-^qF>Ekvw)X`dSON`n~^-&81sB?57p z3UtE6E_~mFow)Pq;-T1{Kpyac^y>q=FY%3_uXF_=RzC456BKJL@nt{!@&00Cn< zjks`W`(Q!s+pqKH;=n^60{k@YslZbeAQ=i<>sv^@CkDpOyi;lSLD!R`+n#%$;Y6Bp z+bJt~Q1iutQUFV!hX_omA7lR_4_a`#m4~lX!y9*fvb)dUCyOMvOc5xaf6?-{?g;{4 zjF>|YWvfKCTdL$wF>Q# z)WEs(RkMrUW4k;tM)6eaN%`YHIJ&^SjX1XPx~D!glvHcXL0gjaDHC6 zUg0XCmEsYE%ACKR<=W!UU%}y=XX%MjzFLu+S8qJ6+a7{FtMPaOR?tU(cLatLg*idQ zolt@k!F@h7y(rqK!U1mZZ59qDqrj&v55ihQ$)y%X;Z_?uFov$#Y?{AE?~^m{DS=nS zSJ)xn|4ok)Gj9kypY6jwtSw2>EB;6bU6{axyCqmoz9_Er(?9&H>JTD#TH<`|`<>@N zv&6WsI?u`L1Vm-dYdpk?cAy0lrbiBfFNPE2?VvEM=nKY7a5pdqul&?{hF^ikvsAh< z$2YR@bW@4?1!b+~5p+;{kI>L(($+l+FSeEL`k|!pCZV?J4MNR^x|Oh1FC<$~cVXDVjQeJK}Rs$ephLSM5eKHqooy zgHpBMQlfCYd#&Ca+A*%%GWxUeYU^j=;Q+L68gf}mAo)D+Y3Q|gg0uU3Ul<59+g=tA zzD@*xjVrYY(N1g@sb?Dq#q%LsHJUaC9QJv!Tez(Y;JVE~%>d-i8xy;Z`!I53j#;4X zi%bX}=jXxhu#Cf3-2mao zt&ZSux5AklhI<%TAf3ub<^gXYa(&F6(CF7ZLWEp5R3kFO{5cWd+^%9 zbLtVU_oj|jWkr4fV<_}|nv}A@>yrmyX`w&B@KAsiKW|alaS0YF>->l99{fUWL*^nn zuHb5<8EK(652#v@OJCG6&DCv(V(L)TLml@yFf0gQr@gYh!cT!NWsDVv&$OF2Shq8- zBnUfLH_ug_6^tqk*X;x1!Agwqel-Z@JZ4=m0><7y)sSuZzluZ)ASh!@AZ+Y?P$q5^ z*A%_#?Kp@I%(?HDamGD?c9n%_=`$285Byh0vEE#{6Ujhquw*VM40V0_L_Dn{fuS1$ z#SFf1jHQ3mKg!DMfB?&4ttnh@8^T*JvEIk-9YVD$mabs$BnmK(7+>ePa!ML~EYLJxj#?AoztfwjTBKDn|_ zG|_Q@RNN44n>TFT=O=ausIw%}jU1zH&b7Nh^98*_?ZkZvA$Rj*w_8_Tdd%Ib8=9Cyj3ohruUw}L8LrbIRA4b+ke7Ti+ 
z_S>fGQXl6yPZIhW+ZP)rD;9$8I{9R;sAKn>xUAoI!&iR4mR9P?I5#8c0_4 zy^R3@I}fXxeJA_w2d-J`8|URLswp+R_r?Eo!aRo2|BRtp7oA**!;?WpoJjgV`Zi#U zo;)rX1eJ*!$cX*X&Yhm5*B0s3uy6CC1%5#)n(_H;UuuKbq2HpW0XAR2DsKU(LB8?B z+BZo%_|Kqmq@;?b&l1sng9Qp%^w$IyLth&{od>mq(fBR<5S+BfayPYEhGra};+5P6 z#zuSwW7XS`ZJePcAgDc*jJEH^urP|f?r}_8*tQo9<-Z6dd z1b~fs@7lcpfDcJ_3>%RNPftcBjFD8S&+fncP>T*uKYQPHUbZ5qtV!S1PvL+obu(i- zxc)mZa|MlxQ}8ub?M_;BLqeutnk;cc#Y);%8Ynlam%&(<-ue@XV~jxRw^cO}niiAd z09Bg5HBOlHDe=6jQ_Hps9&ZnZgXpyM7@RzCVm2Pf3%zvW07=^qmkN>KOcnQ6{(+%!)YSV3@DHeF z{vqH)g4^Nl=s|jr#uBA5ehpgP!s@BZp3tuWxB0-pddBgC$qG#s`)m}K9um@2Xs_1w zC*qKB6QG%sS}*0`SHG-vudKD-1@NBYd@CiHoksroy`GO{17R`=s6-^(GRZfhE#kS6 zW)h$z80#&zRh!v^vZr;yzZ6S=2(vfwv|c|N7@9_2t(>p_$v8>!Lna#27!F+gQR;&{ zM75Mrfb3bxa>qi-9-NsA4FPX@peZ6IUAz#E_hrc4e4d)N^ZM2`ARe8QuEqK#2a52p zxw)}gHvB__uH$xO_QSN7#PQ9w_&91g>GMLCUpfSP54C;(iPq5lvJ~^<#%?U;;_~dw zG2HDfUa7Ypx2Ogn)xM`!yV~6ppU!%Q?;*p>hZKaY4krHDbh0}f);(AEu(gtBj9@8| z<%y2Ng`-GQK0spPpK?0F>f(r8+&5a=Rd9~6j)(K*WX3JiM91gADxVp=U!$;n(g)vz zvH`+x{HFe<@rnl5VcU$m!_MSx2Grrm;Ix2Xdel<6Xi(!ho)U3KcGdi?wu|KFS0HxU z>^3uW&|AeQJ27hv1D$1t`NhFOr1s%9#~HMeJhIx~KAfO2m&cO38&>?P!6>e`*#I9F ze~~)D)28KEREd+MIEF2!v;k_!Ri>sI_M85@2B^*O;m6*dEq}~G*eEb}iR|^zgSAI= zA)gN2iR6x^;AOaRv3_{8ND`6nT8C8aHFJf1cxaX4;z84QyBLeGF~jM+J?=z2O{3$@ zPt_|Af4Bp5ymfWz@SSwJAM;Y`+otZvUxEMA0g?N{l&+FP{?8}hASjMIEWu&Ha0_ZW z`8ygRYhqnSQ4!vNCyoW%3R?oE7O5kraDK*p75mZC(NlKjX)PMig5Gctu2x-JiK4mB zE*Tztg9s`GMe*n}HFK4Ap0AH)ndOhWVZ6n-R#U`Ef&7Gc5c7)X?YYbU*zMHrYYNa} zmg!s=)n?8`L+|A=M8=-{VgOe`dVnRKMRCkI83|ZZm5`s=*a5O$9H5d*53FV&+OoR1 zoNA1N)NPOXsVoxPwGZ5a?)F7Jodkeg?!97s#|l_((5v?NZr7M>88ib6K05pGWvl;2 zR2^J%Z*CR=td0cr(_Fr4n?N7SaMsGs1mU7FJCGu1rsWcKgU808bc4ka0NlDX1@a)G zMp-Evg?ChROA7B&IMs}e8-Mi<+>8W(Y~3u9&V#s|eI!{W{JQ=Q0QFUCu?Ed2g%HI+ zFS>{N&B)M{*&Gx`4VlhIfOdpEp9mg7bI#B5?$T{P5iO@w ze<%Np8y}#C!LnX24_Z>bKL|(yc%JfuTr5+{xdXca_2T-}^2dEiAWIyE6%r}S!QzP&V`YHY6OuWye%E_|#{tvwA<`vsXMn~{f2p$XrkGP-;<>i$*^~I^PiDnJC=H|A zuBhizXLT}6R+}&*N|q#mMdI%<;Kk`n5JGuVA!>o)*&F<22W>BuDvdCTP5OadwQr}~ z*6}O9v&^s$F&yW>Mz+K~D7+IBy$Pp~#I9l$1~kER(!|*?7$XtjcW%h9;vn>^^qNCQ z4JGk(Q7vCEjKx$k(X!kH8TeT?VCwYfzSt|df^W_RGOZYl`Cd`1@Uw8BIe*}0FDv@q zHGgbn9; zftUAGY^N|zZY}1hA7wh>Ru9rZ^W$rVFJXA0#0r67c%YHHWelu2x6a@d)72C*N*JyW zy3~{T5TS}r2ll}XlwWXR_+BBQ3|{2#(I?rQY;I^H8c0t8le%DExLyWZvmvf6ymud7 z29_jYYqnv-hN8c(n*H@LvxilK1Ig(R8xr8>Jkc3hAUG`H9kHkzk`;qH@#>@=%eVOR zVYyGX9+n6q0ixYl7&mpgaFj|Mc_S1xi~dLgdEgW-ij6u~-4IuA&O^pz@B4>dt+`A; zxclC2S{FuRvT%cp@BcLaU*&}^fQ)dm!T(iD3C@nB@=I)=_1`DRLA>g$x9vs zVHM$qOBNjCc#*>WjX1Mw0d!Ekf;k?_po71xJdc9|NgHFCj@RS?B~tJQ!O8H!z{b;% zjwys9p3+76mM`idYb8QWvKUPZV`Ce(lnxMp=PF&RHVLpbr=XMnphku!NYf{11OQk# z56W(-(D$~Ez@HD^}z42)m3~X3O;fD~28^_>5sYQ35%tl1Lq3I#%th%kQf*(GN zyKrH&wv}w(6WvxTX$Nwoo|f3X2JGP!ABEMHR-i5oOIB_3*y`a>i>eHxjq@Mv48q1j z-FDFaf`A4zI6LM|a=5g}*Ld)zu;ZTie3ke#eE|I+l6}fkNpwyGZ+6Rl}UmoS^_pi{V`Q!t^Mf zik#!zaH9eghrDKk7L_`-1CLc29x=#ySIV$YiGg-E zF4GL%%sD`o!OU?3t**>vSRTRTu?C&#q+nEM486;5=-Ke-Vi~5CWOBJ_B8*Kx+HMChX~?6CV;Z&2lD;6>v?ivQ(Pnpm#Ok z?x@-E6o)}EA~Qe@)IsK7qYd&6_D zlNWei19SMr#>;m>eoZcKb1t7YegonMb7)SP3C~EE5T91Ab}lHwMd!QWZ719?e%2nA zUx2I`X4`(>ErXjFf;HXHz0_N7NbH#t`g_V)^3v0P7dJ2@{(8)#)N5U#LzzSFHfXAX z`qu`20{tDST)*ZMA~VQ-+l51rj_)K zETpJenKPMF*p!JUc(CrJ3h;oP->=}8u$1^cg5jYxt6N_W_&J8*7Ts(-Mz=SiA0aUm z%*NT30cJ-y(%c!)zD>nmVR=Hqr;vS-M)CSF#1vSi_Ha_Ho~AX!L8Gs8`6$jXKS~zk zAFkV(qs#C^c5yF$As#lh1AQiSxQGgS0<@&6=M|+ZSMEE`C}|LJ^HaOJ0noKDN9=#vQPIXQ>Qp^zzJnDsxJFglo;}fxn9<$L6WJlgR#J=*M?*H1_WLF8v!=@h?efPdm4*H*sX5SD4w0SSj)6z~k0 z!>bfs!17QzZj6KLY1^Spq_91IitiSjvC&qf`aV{;#nN!2Sv~NIKo0%;!eRq^+bBj+ zM~=C)eFOKI+=<@XqJb)nmr%WCZLS}z!mJizDEHaln-Qu6g^a{`2j_~H!p0s9CtGu$ 
zm0Y^`a!imLV7sfr7H$CJlhp3lui8YAm*GU;Wmq1RqTdnmclg%q3;05h#(n>vq<0T! z^32x7-w@;ji;33sG|?nrVGOi0%I&lXB#_i20n~7D`kUEqMyX*0JXQ%og-{@Yv38so zKyYhm3~@t5XGX0vUSJf3S_pKu1c5D9Q6otW0TGb^is<~F?DJo*NWQ$^de?f^bHUzF z@VHXOZ&Jfga@eofto*!NfS$8e%)C>(vekRfYh2It8G$CRcYsR|-DScjl1%`dO+{EB zD5uP1nQ>}75qFK*X*hVdpN>jNuQkP{4{Py7541?mD3XO>&jkL4PUos^tE(^cCR;6p zbIXX+&%TdL+ zWWBP--CxfBb-j%Tq2VeB49e1$f!G;o&=4nz;f9Jd<_VR&Hlrrof88UM47h;IxnZ0hNi z0ieE?@87G*P!b28e9;#zV&$p4SXe_qw`$|02|M`>5e_g-SLYW5&`-`-vIyA892AI( zYc=EWhwUEkV?IN^nX6`IwyDFVGXc;D{}<7o-=%roAJ#qALP2Q&x@UMqr0FXy;}2in zH5Yh;c@FaYov-^{zB*&oI@}f^S02lSA=h(hchQ>hul22C6mwjlk}y9*;$?A+`YM7f zeBO&+cs*kd3(5ETw*E90tDmy6{=m>=VR3AC)#}sxKr-Ani*{tJ{3uyP@$!;ffVt_` zDOEkzfN@WLcQ>S281v|w+S&#*K3FH^R{#4IG^qJSp)~pVJ>d1ftQkI}7$i#*qn3m{ zZsE`Y7cI1Kv!;_Y%JXQKoDOqWuGnZV!#tq8;@;2^0*P|8b!Vod8FaCw1@alQPcE78 z#9zGn$1X2}jQQAFewJ$9iVvJ4k(A~MdvTG^jn^yU?6AvU9O<@QFuy4K497;W(~o(> z0Z-AAH+gY;0k2o3zfA8c0#){KP!;xWqOmX+b!rKG!{3BCFe}lKXnjnGuPoXWW8#pN@=Xzx8pvYzbx1d^4(y>m_ItI~(l(T*{xJ+4F1=pa?MxXN1G z8#dPR3hV>X;&N=41dbZ%Vv%@m&de`J*aCAG3mseqO!Ea{2( z^>#0F$7s^-s5Vf*NA{YbCqa*K?{KSE@SFEr<{r5Cam;&BJUNa>u--E-g1yeDy%6Yb zPBZy)LU)mMD+PIUpzBq?{V4lQ7)>M;XG|Gd zv3aW1kLX`lW1pR$^fa##?gMPcrSK4L!;e~;JY5eR=nNuS_lh9;^~*25<(p+{!T9G+ zEX9MM8RxfG_Mvnxwm+9mR3;`Si9#ToN!PpRXPSf^BAm-`Nw*{BAmq{1zH!ey3{=Nr zr=$TD)RpG5VM2Bf2d{m}6Kh*=mk5jsg|t^T(J2Vti~JH2T^6&<_Rjy9aJEy;1Hdx{ zLq6KTXTFt__31r4hL@}u_f5g8+AGmieoX7g1PguRw0{3Hm6ZPOgE)034*`>C8vdD; z1M|rox@de5kk1j-z<4JWaz=0&B+o()wI5&M$3U*3c* zQ0)=GkOOm9u*2NHs~~QsNhpxqIEI@-`|N=!V!hCCbFxI!N001H1`XKGZ#B~?p{oHZ zZc$eeuN5Sf{T#u3`o*UnoV3U*t6F>fsv#5t>L1|X_|>-JT&rBqvyXvs@)Ik(RfN7=!{Kf(+FitO6kmck6*t8Z%UbKn90b_(kAO zepMe4i7rWkde|B_bncPik$vq z`*~2=Z464;=z%6CvkJDG!ZH>|7pj6o~Zg1O3MDyw0R^Mfi?^s`Y^UP!)7748} zdwF=aGp@70AZrDu$KBF|Of=5pDa?~@2)F_`JOG`3Mb@)y7(-JqQXbU526r0bTBYgD z60*9;ooqxI!*|2RmI^!ICFCQYzb5{A!hy^`HGDvp;|4js;9Vey5Kh?PoI>8Mb=|FS zo*#Nyn#kdaS*{zBfT++;UGhC_y@3r#PxD#<;7h-T!3;0tH~}lmPvcA;^6oo34A(&Q zG|o@oJ9b&!V@7=yxDWlmhY?Y}IQ|+6@w41nQF{z&_#?XFjN~!3?xB|X11%+i{&E%@ zhRROC2mnY*)X8!3Z$Q5=qU+n|!h-F2kqczd)^-PQX2TzHEWY!AQCxJ>gVQh$qMOGz z)PCnp8X(~H#=Bi2Y_76RsiB7=Z2}ANRsMO}w1_f5OCG?@FzWLG0OD#fswa}&KsVR! 
zxuY)`(Y|-6wl^`4nHJgQb#^p0K5w#>r;x1$rMTm2k1zq9<~yfgn^cq35mE(e0wQbW ziM>ykb)%HRkLt_PT-p!W`9FyZo48t8&p4jCA6>s!Vr4Zw6W2bnkd>)ymNL{uK>)&C zrY%?>+|llU#=m#iRBqw$4q{#SV9I0ZWo8ti=YqbWK+aOew|W3GlYB+$g_DW0&)Y%& zQ1wu}*u{!wF_1JfzjdY~Cg|!T9%NVtT&t_6J8Y>I63!7^htZCos^&;EuE*69TF0c+ zMdvL+%evcM+-ThIfPP=dxOqgQJd}Ap^y7ywC8(WT6glDerk-D>iUg2xWu7Na?*f0= zRf7w@W20+q3HJim1nmm(Bd-cd^VHK2n4x0`CKb}&JahJufM84xp*213`y?%@cm@Ly zOe}hfSp2fj-B_nL;AK&W-heVCA!F+WfSWIpYgNikY$r|1V2f$DR?W=dql=$LjHQuw6qd8zkzAGh`Eo?`!R@F&~# z4SOsksN_n_Hywa3tv?pE)7u0$@YQ9=wpLbY(56Fa>blTL8Q57444UoMw(w`k^EM9cOi~cr8*e*F!915ZIQm)Z6f7|e~TMW0X&`*Rh z`|o$46dDv@sR@E0ZIcHbjNzXUc@6hV7@FHU*ku-3D6Wg`+@W0|6ZJefMu+Oe$`1Ov zan#@Hiydl^OV&PN+j^0F!R{^gG&3TkXs&!co+mY>w60<-Pc&}fbx(1okgbrWl>E5u8P@l3rC^u12G>aq(?%D|U{LLxKHJ+3# z68UKv+IO$zc+B?5j$X*k7ysHda0Mw-;(~~{`96Bp?{4l9H|*SKX$QS_r#~YFjlMjA7Lw;GCKX2}0rJ>Rn$@rdQCTtnP>12mQ*< zNd1CWhPpA(#EdD&mCvp0G908H(uGb952utP{VbPuwRmB3KvF^-bZY^ zFXn)@w>!R5~$FHqHy$1W)wZCx{3RJpRJ99%SiH% z*+y=*t{WHSIf-h6aTYg>?+fNB>e8B0ls48rI2{IRs=lwqotFMqEw!Fn1TlH?-9iv{ z;DX_nzrvm?vNsoyWyb43VAWgv6cNm`P==}duplN()`{Y7D81D8s-epZ%y0pp18Kgg zgK!rki>?2N_WcBDd|kE{E0;y#_E~WJVL~~H7JaLPawP*%3AbjJoj`mhHif6>)1*)3 z-V6pe)?&12n)P~KoChlf){)eZ7=9bUOvv0B4My5%7*$gbySl#gS1L4OuJZ{gxKMe2 zuZULAoxOH8-w*2zMzNLCNEk93j>*d+s!AgT@gZM8D`4*2145Y4Jp<~29kYB?RFve5 zTyY$pZGaI4J6X!>Q;UozY*}#Y& z6L!*hMYwf}Fjcw}0cbfEGlWe-+eUg{8i_qpG{2Zib=4C{FmY=%pHf*<9hh5 z^(20RIB-X`ashwQ{ozB;yQxBlh44>=D+<(ub7q?QS!5EE;{Y|%5o-9x=4W`>VFmv1 z>hC%&6z-7RK-L&^EM;@UvxVz;{fr0R*4_A_tN{T48pf-I@Zp^5mf2fmwWH>H$K_wA zSHwaTD@jy!pK?&oo@4)O)rbVAq#)E$flgAe=o3>i0mdz$DQIMWvyW`06lx;DF6aJ* z@YpOz5W2$6HA6R=HR%JFOK_bv70%ahd-<*d1Ek_+bBMXAE1%%@Ut$$G;s>wh=O;4L z(6cTVbqALpfK#1#8Ab=1GcYGaaN0U^>PUpDOT@><=<(He<3^5|{@kd$+eZ{kxc1er z(-v{FFkwgqq*8WMeTDS-_;s^0=ljh!vyEIwsAsN|#7rGnlS(L^02UIIBnX?VbIkvWzm`5H-zsxsJt1Us7Q%u9b52g+u%Br4 zeNr`m(JZjyw@S)guZV<~dIs&OOweEbyl5WN1~ywKMkgk{P2N)ULWMo;c%?)khoX;$ z1i@*=!Gc05U|2OUW8Ui^m@Ee_Cv3@dGW@Vi7_e|Aygdzv#m&J>Z!Da+Q|qbRy`}XtaFa^gy%3?W03gyX`dYmw4lCP;Txwr|#Tn2Tfv55|#k+a#>89 zrtG(7`pI7ArBx_kDVaH!$f|K{3@ie}O>rT3gJzhr;8j{|sDm zk?fa*L2NvID7wb*5cwU-{;D$gubKH8>^(yC+Vw$SBR0&Pj3dvB&A=wFM0xKhYC^+S zDH|RSlA@8i^O?`pdj23WPQMA8`5s@k%B1Qu?g2fh7sS`=1v=atEo zAdX$G1xN1rn>_X17nlM5MgAHtL@hWyv0>ruMe?pBE=>}NmWEeW2P@#+KLJD+2QpLq z5o6^aUzg^iI}%YIEpu5&u%qF}V<)>7k<}4lZg9@1^FIUPT+}@pHsj@U4V?|gwX({c zQu;M|;%ko8m3>+0E5~0gzoS?6f#B0DnKHTBet}>}0~$}%TrCEQEIq6+ahoqDuJezR ztsiKY_aEZ{hFe)7IZXFPSM+!~xrJzSEpc?VZ;m9F*2vA7vPeXtpsi3@Sx;Iu8zE*L zi;#>8LlTY>Qrq>^N7U0~%fy_~6&~k&i@jP_O)b24Doq*AD;G^g_VV81Mz%E#Stmz+gSmmbuN`#`H*Q3t7f~eP>T2Pf7$r1&Y2;WK{aobJ zTv~h1jTmhq;im%FtV%t*WL^I~Vz-;8M(s~jOuZ|p~0mjW2h74|&54+HuFM3=wcZ)kU@Bv;B$sgmtX$sL9 z9>Elnrm7Zk(-b4gJr3Gw#9IV&7yHrJ+)}uwxjR03bKJMm z=3w*%6z?DgfLN!#urV$*cX*RqxEym)BWbOzJim?qve z4W9Q#db{5-4%a7{MRfV}oHJIx$^*NUdJosDj*c^^JDQb3PJ3^$+4o)&)qH9b;U{}+ zg0o~%(o?zO?Zp#qt&^A_^d9$22{D24)ip9FCK={BIkeXtUZNyCwg9oDKIl(v zO7H_9(e&{M4xg%}pSKVnE$;i-MwH`-B4Ab$u$ibEtiT)tF3PNB2*7E>3QKuDM)|V` z#ESDl=knsRafid^*s4Jo%7zIX?#Lncc#5zC9#FndqYs7Al80GeVAooHR)7J|52Kl| zz3FWoxwhR9ha!w=#;bTs+5EN`Gov#3B>?3Ise3+`1>qngJa1CQdx`mmaS@1ljeB7< zzJ-1l#r&K%Lk+eVF(-f|S`&Ue)*`|`n9pF~m3erKGYN-$fQKG0?zV1F-|vl|fk9D2 z?-3+daIsP}`_4{oFcsl6N@CL(LcG)YsULtVN4<)<0gmt+TGIk%a$<;{X$+L&D<=Aq z+opm<%-N~3DnyvKQL z<-2A4kOHcq*UYoN`MKot9(E_^`-XC)6E{9{27ylpQs@>4hHuyg(+W!Aqq4fFHxI7s z_;j}5sYLbAYAqw~wlx7l53insMZY4nPt~Vd{j~4b&izdVv`FX>7zWASgvTjZxbeS- znwnb%+EzqVB)-D}>4eNGPV>v?CUa8L+hBuOtn-*CS2@Z2qq*i2Bf;z$Sw6}LOEr3{Pq?t~P zI`{HDWZAKahh7FJ9sWThh6Ui7@@=Q`G}+k>ykCFPstsXuD5~g&Q+3RB_wjE(HheP^ 
zV|0a70)4%^@X}&i*9BG?BQ`%FmMH*mc{N!~0wiG_UGbTk9a`4M!&zs##Fwbj4SkB=B{QK(Mitu2OoV02O++a^q;2;)M?Pb9EQqMo7N=8AwD?JFEL6gc@s8< zKY{tJxFJ86{&SBQ+kd@@<}8`oP1qClqDB?(>=9MI|+H^P}-3m)rnQ zDPe@!xVZBhe{sDT#)LOOccceWq5DS+65R|P5T&|;8oF2K`o|2L6N9g&=uKF#w(NO) zTNv3bzh$ht8I!N>2~DHbTwD`tI#mk`9_JK_jy1Eo+(M_$V^{Q+FK^?15s!)I#|dY3 zi8{H)76Qx?#`EvJ$)iGoy!W!;MdN8;d3we|ze5aH0wFk&@eag-)4z)|MZ1&a{~Q1X zV$#!imW03l>Ao(h$g3WsdB#)Z=Tpl(LH9`@$zwtDrdt(|k*v8`^z;gGF z977`RK|$>;U*vBsR<(0NuCN;$fT5=TEE~rN(-nWQKy3hC4UT@v8J?pa)+dKxZT z4NseND;zoKFVpW=#Mcp}0o@hYo%=D*j^8(Vh_PRfRpL?!3J{tI-pkdsLbOD1qYiVN zPa!Fp_Clx{5*%LAbR`!Xg@2KAT9!|=p`@@;04-JOk9B2RffrA`t;^oehQ>Uz-^-*g z8*D2ADiDNg5JSV5#$M9DUHkrcgGSMUbHV|WgDEiKHETG#hWQx5^zai|WxIjTe0rpA z&O4GNxnf%2G$!>c89^mX??I7MJX)bX9Z_w|^OU6?k2Jv_6rhykhJ- zJK6O4vKL@ok09a1+V6O|Sdnw+@NT_D;zV>#cN6l^&?!VK#(Oaj`A)VsinCNz z7=3Fxcws$3+u$ocZMfKK1xqhV)~GMoM5x+<^D(L(m_U(CUdw&`;`L9xHUHnxbeO9@0;WIn-T`N*g;g1f)gKC*!HOEP9l#wo766y%Tm&%K7 z*g_I$HcfBvNm@J@~Uh(uTa@)U>k7OJUvP z-Ej8}%%QhVq==j@u@l;;9QWiMo#`gfLsG;}$TZcpxQ>drSYX+LL6W=Jyh_e3Sxll5 zHO~I5BN6_hQSj)4QPAIvrV&>>xL59@GKF*KFN*$HVvgd0?-l?6?@Ljegtw9eO)ec%7n2teLf zr~41r$q6YvJGdwG%C?>Uaric9NgW^Qm&R6ZdT`PH`{HiAL!ws&C25I?pFR(xeOwW% zG+jg7mZSMg5HR-n?MFiup^!N+v{f4ZP^D;TtWe0uwa_1k+If=PuJ@9O6Z_O(b3L|! zvupn=x;h_EswB{LX%(+Gs&o?x?+FH{o38?v!3HNvCN`ST9}Jj7;22ihrv?kjWa`+QE^ywEgO#3H4|yeaAUz%)E)3p#T`XFUp(Ig61HoqRvUD|DQF zD#_(xMS1BLBrJF@HQj_3$Vcy}`dNlOmp0jhT@Ff1U&b7OkKY+WuaX+g8jF$-;YpJRRMi*6O zc}xn_LDj<5el-BMaJJ(RhMverL5HcMHWB*4WuwBF9FraF9G|_MW`1nA?Hni&6NmewzrdHx>C;z%KjVUMx4^?+{_;r^)aGw_}5lF`A=7-^Xix7M1(St=piJ0#EeaQ6&*;05SJXb1umT}K5$b+? zL(4C%5n>1b6R)^akdE$6F+A7EWeCl_d}yFIym}Kxi<0EYTj2fT?m_4X|9ApM)`BE7 z67RO`3mXOVmEH_@ma2?gvPZL(+Srh`o^VyeulpimVQG!T$Aq;%_g5>= z`)FAelz5rU?#wPNGL!73i2-xwpOdlRXcvvAa?kj}!#P-a#ndKxQhNg)_fkLGvCxRO zzyA0(LLg|HMWM*jgMMsb1#-U5B-;z_ddx6cwotyV8tl|nkDC1|m~%BR46|C zs&u}OQ1PvRvb3isA!M-0Y`hD1^h=9}BT$^kFPHm9@)VS2rS>j(Q*#y1LdGIDaYPAK zF|b7p5pf4v6EX1qG$CEm6Kc<*-7_=?&x{`;WYJd&g4aZdG~rcf4!er{j^%~uy`D>= zj;C0yj0%M|*fw-U-c`$=#1){fBvR_}TXDR^h#%5?{yf$4F3n?4#|#{REx8w*ioTd5 z;^Aby`tC0;9jrgUla-T;36`^~B6+T7q}D*K)2=7sqM6)K3+xX))PrL0e%tNI*2W?@>+ zRZHp11}@QC00jys8g_ylPx}Q@uJ>WXf9IGs&IMLOXr-|lHbz+VFi$=?c78WN1)+^2 znzdNP*{0z%pU83DtK0Vt&1l{Fj5(J}!wb;eb#q~*j9;*^wkuD>gA9;_45k%(tqhrp zi~GC5Jr7Dzl+2i^?5UgTCFvbLF<-uLE_hRFJlad&7Lm6lcDRrtl$^w;62NFDXD&uMsAyj$Uu zT~rET0vphrF%&B=B~)+{Pk%W5iHW zRtJ3#oQD_I6L6nzz4NFUYdyVMCv~Ca@>uum+z2Q;921tV8(^U0P@D%PT)(uSJTQa% zZnO!Vtf`J>jl-M^zVwGk5(D+N#$cRn8o!&BMr$~gCj=}_H%=#eG1?_-DbvF`vR4$N z0uN;1Q2bX|Azcr{FNBhE_3mx-vAYWLihkuaOT)%jZNQ-5hyg^JZW49kwqq|yM#p<% z2zN*IUMDEnw|w_aN-2-WLUaR2^zcUxO|8C<{?+h{=!P#dN}1cREc(hC9H_WclAER!nc82Y@ELT z+(M5bk;A#;V9G?^EnN~e!(BlbpU%X8Ut9G89Q}m!(eOw;h;0#!(yO-(OiIvdUi1`Z zxPr}9xG_uD^NQFuwbV~8v}{!fB?mE{>X&H)Cf)Lkr1mG;_s?@W!|Vn*(67p079>*r zMt_Ms{=q1rmP|7|y_cZq%gtlrIZ8wDpk!P%n6d z{Of}ZRW;IK0=k&n6JmuF=y^OQVxwqS&=N*eG4RDcRS&{vs3E;dAz1631R}MJ1RV5; z($Y3C*(3^hxW;H#Hb0g!xJE6ZA6f6;D58uG;fOZyaGO7YBU_-y#hFEc?WMs+e&cCU zGjJMenvjkKT5}=!$XVgd@*1({E`nZk;Bh&JBf#w;j(p|DAc(t5dY?mc$O^2aLC) ziR<8gLcde3JXhpZNO4HR?u#%=UxqSOx0mQ_cj`XwV=F7I7f@gz8hTUA#+4fQgPmy! zcUH*QK?s`@c2T0{1GLoLfP?*MGRN7l?u77nCedC{ubcOui{2y{s+BHQ^vgemv2%!7 z0I6jVx?z3!j5{JiC@#RS&F*2(ukaT+_kk@u$13c~#q$1+fw?#UrshLbpx@-KjHMA$ z8}MMYeuu#ij!m?8Y6eR|bQKI?&8ZqPydC_5(+dbWI3dD`#0b2C!lrhYq-~Uz%=;?b z59wuu^OV^Z{*h6u0?fwTs787sGT=+2a;=}&;*qrXOpfL$Xo&UkQ4PhY?!!0sbo(u? 
z3jbo3{o8FXW2qVxgXCb)VK4a>`6Md&^SK2Stq$uJ z271aIKSL~x=lYP*Wx7!Fz2_f28z za0$hlH~mnD#chM4&btN`BA*qoB7nte&@a?dz`uhz@F;+;o8zmNLaR=wUtztCr#8UK zxr=vkZ~S00GBk;cf8Armuh#aDK8;}z=>XR8WTTgj*nKZibgBV(Un>S(CHBh5j`HTl z(O_u{Wt`93Gyfs!n^ACkxIyDBS0mQdZm$u8XAO@aK1>RHE>kCw zG2I>W6W$`kc;>_St=IO>-)@1afw(GN>KB!q4)av%#7*|IB}9<+K5t%=VfbOKB>J9q z>Cs+_GwYUf?iJx29X3z+=mV#$eQET|N17$fg7Al%jgRptE{JTbftAaQp9nZ;ZU18U zi}kyvwETd~890q=IeLd`R+PEDuJ-D4*p~f`mCdO9K)|}J z{jGusE=7w~_f03l$gif~WtJtgaw_|j_q2<)uMF{}_w^fL5rvs(bJxdbpm<4?2XpbJ z7%STs&O~-w;XQ_Q0YF6?Y1S{(XrmfcPosVsXE644P|9Y50u~Q&0pv5wch`~Q)SY9& zGwl+f$cjh3x>#~8Mqh&eCO-8(o50!rT(0JANb)rCJEJFg(oed zo9ay3vKX>|ykX1MM!;I3yKURf3U*&4tutV<)*6BTM7MKpad7|k?Q0a-Qt^Yo>2|T` zRxnX~O2f+^0vVMibuYL}eB^K~^SC#dTvcXBvtQ)Nc-F^KO2UK#vT+5;^b%~+F?RHb zw}T-Vm^0nTBI>S1oEcD-=oe=f)(h4>gTAQt(Qsc>s`oq0i66|5E&mXSXRuxx&H)fq zV=RP33DhVrKSTs=Jq$(Pt`7@oX5rIrbY^(B-*3W!O4An(Z4JOe>Ji|2xdUQ{(p-G( z?it&uFs{MtYUWFS{`TUHEhsp=Q8otTVWi3OHOx~W7{Zhc<*0zC+}naV?{TgbL_NK0nyFd~Pfqt`3}$BD5RHnOMiUDF0lZ0AoR(ybAwJS>$K_mm31m4$Q#hiZ8Umq|<8J7x)aj?e z>dCGaT6UeIn$d!&Dr?@d^3k!A<$e4moL}Tqid;w>wnoaK^j#)h9409R;Us&ywsLgQ zCuhK?V276GIu{ezs{q)b57lQ@M5tb-k;s9dqif;}__CF3=iI3|Y^Y8*K8hihmis2! z>e~B8#PJyOOBQdej*40Yaw`=Q?+`XQYHQjZpe3|Lz_Nv7rDNH|11nr@gb^FHnV)7P@FTqeq=5zMp2@CU>MzS*0XvIjfP~*JX8sHY zSoICdhezmvd_03YLgje9tS6LTI!YT>LS9}NbE0eyR(H{(YLmI&={l!S!#n4=SpRP7 zmftn$hNf`w-T$|5L-)>%=vEf&TZTOWcU4)GNNfmu5$Y;%Pyoo*Upse`xXvZn`~Q@) zJF-fNJHGf?Z$!6# z{l`|TnDL({W;0?Bc+6Dk@%P-OMqPiy-%qn0r|EcA53F2w0A#o*l6)`zI;bWz+XJgZ z9=xyJx|6K4v61BahuvG{^n2PBKVEY>8PWHSPx;YzR@Gr(5Ba}E)*stTwesK2@IpU4i2vi>3MT2k|(5`$D0)JtR!nxZg}M zZbXI}^4EtH{_f#7PGCub>k+ZDO`KpAK2wE1+g@^L-N5N~`?S>~_i29ql6V5h2N`-%%j!Dm_3}iCr%&Y^NUVFPcJCn{0YBN_O8o`;O_RU;x|V zp@fv^moPEh4KpWRery?dDjLuDhU%oyz|t_XePuPao!QF<8Zgd@w&(s*DuGKTS=Z6I zrr5Sl>d#KoB@0MO$vq+u-{dp;#YjhTx+y$^}d3QF{J5%~mRESW|u ziD@0T);u%k$JY^K4V2&DnHYY0qU*&@*y&&~+RH8y{>w(Jfb6D`YGn(HjFkR887xBP zk{*0H%(I-WaJlwNh`-2}-#}yK`M_WevJhXFso5>xN#fg13x!@9gb6y|#Ins@SQ|E{ z2pyO_ezQlschbGdE1q|m9vx*nYk-3JL_Rd!z*rVlG&kEc14T(QPyVwa?6db?wCqXr zefzuhUGBDI&4ic1M7tnG~E6o*lqd; zJqCIpbhUAChf(J}jo45M-SYEWNWjG4e4j$<2;GY#5b9ieDHu9Hg}kh7Jy(g#!>w(7 zVz>)hz`Fip#4~;$W9y>Fe=DnNKuZMl-`wOLItvk*-SzOR4iO1M6Rx~-?2PLMX0SVT z^p1`Xv`am#w%71dYwz?`hG1j(Zruz&lm^BTk2nsUQ{}{lBk%7~+ER!@u?g)|(pXgNNJZoTsK}d-6if0V@8ZiK z{E9;)a)|A7Ob~2X0^x-uRow5Y91vlYlV~dI(;`jk2^8tC+4oI#Cxl=e`<%A6P89$tWP&B?9kG+3Q@OB?5@DJKb zjw7bh=!551P~812Hzf20sE&d8`C+ar9Ld!*vQj(~^YqIfre8AZ$P?cM{mQ#F8+(Pe zvLvWa5}zF$=sy;^R7QEBi9KaaGq+X;1T||&S^Q|Y?h0pOrn&g{UsHq$5h-_-(7Asi z!~c5WkNFB3rNez`4)|Z<-^&~lf7^8PFH2N|_WeLd{0}C+Q!`dw96id_TH2E(z+DF7&Dg+t7+e<>gb6?M~&9( z!U$4zYSHSbMss}YTZ}?~p9#)^@p|7efxUu<8234=Ua{+{QMcaZaRw z8QhT;h`b`31*iC#2vvIMjh3b*UEI5vogH z)1k5|cD-7x_`{1xU?x&g$LBpw+$mEegj-dz`6qROhN}u7j^owIo{8iII4ktg|1q0E zeYw3FApuyXQI|{IaXG<8*#0M`DkQn%S_}jq+MW55@W`rTxwbE{XYBQ(`m33B<%lqI z3>f>3j$gC_cs@my-ALK=gT2OdktiqCT|$iZ4w2ZTT}(FSoKwp>95Fk_hHbW$5tOMxRlWeIQjMTWMV-w`Y=PZ zspL>ZtNl5cTYFhe44jp*BHJY%GZ@4)`PK@o8L>%|ZFuJI?gEx7(RIchu&uj)mc%eb zBgYISQ(iZsi2QuG%c15-7~!d+UqO{TW!)(haBHC0$fFVtlkkyheb8p)#oRc{5u)3* z@**ee(Gmh2$Ji>^bF&Rr1IbUk9bsS5p$HAQFV3hg@B?Q~2a=Z(6CkDThDkrF z{Bdi{bt_7Fbid&)uvgY(Dq7%;O7YuY8)s}W#Bl@)?s~K0MY_`s{zN2r6r=yQ3$=aQweRUD-#|~d`Pr&OFy5(O{x&Fj>7tYik#=q5 zr1aenm-={k$rE>s%MWn4TnDekLy0D>?G8<7u2H=>Sucn z))~daskde*>5ahGR>qXs!W;>rkwjFl~hL;vw?A&NmA|O82^4k||@C=FL z%Rt)n>rX5W33Z*ZNLHTdwhB=FNpQvcA$|2bZhyD-$ELz3T5e)=w@9KI?A!kZcj`V` z>h5;*-s|YKEgm<}F$GJjC@nVUa1bNr!*?{Bx&5&&qAecH^@)mT((6?cTvD5Pxi%5U+G_Uma9vgF6e2N$ z1|&tl?8N~P1I`A!YFm6j(b(&Ji;iZI%J2ZqnG3q6L%+M0^<0MSdULR6av+A^I&lO9 
zj_1~DP5*V8SO<*|ty`4vbrVdCY3m9^)_7w^!#Cfh!V!wRNOEu5ny9Ws|7V;L#n(l> zkYYMI@lq0=FZUgR1;?`l#3Re0VM-r5e>Qx)rUuMvqfe~sEJuXvy(>RkiTH??wD2(f`&4E!T;#1#U>VVtJ@3>Hy(8YVZip`*t2w?sg zb8WUaj0g;;DnX|yb~l{m%%Rz#g5TD?tANoP<0mt7>mstoQ4y86=o1#X2p2m7A~w3+ z)RPubcP|__mKxuLIl;!KBZeCA;Br&RvJFQltVj?-c_|oDF)>^xJir=kEE}@K5m)5)V$KdR70ouGbWXt+hw6KR zjNv1{UEV3kUm1*SIoFLlRT~k7$M_fd(U6!+SY>k9$rR+e@2YU97Toe+tg9n+vQg$1 z62VM~!)e+2r|+j8RyC&(=tde`$pDtrHUQ8g`77xmgMD-&YCPi$Zd07{j(&!RHBH9H zj`LfK%-M{EJ|dSe9UJrvc`(L@Z`dwz;geeIY0n8={RikbQb@qR@PD5k2O}q7VlFQ+ zA_ApGG9(}#aYh#ZE{i-q+!}XAU811m`EgA;LkIv`R*4zX@(=tI+IQ2pdg&n!_SZUr<7OVbg~ZKH7+4E8?fz`W2i1 z`Fyx)3lX&oZF{6)?^H`Ld|E{}Aus>V{-T@wrBG-o6hnEKJKX3K(T=7LlaE3Psd8>^`GD;!ZrMeD&)u-gSYtN7J8anMhcZ?0e|zA8zSa7l|vS z%VKiQIOr{>^mCbXD2F9qB{CL;@n+S1^(2?=-cXgMGGQrDSy9?47{wCpg@a1yKWtX( zr@fotBav|H9|aADKCAsC8&Z&J-ZlG+NGUys{>pPJ`Z{*tA?H$uxcz!ioVC<8wh+}!n|>bvoh(Y=^PQR%HS{l>Tkb|2a6;kVW+PLEu@jCjugA&QjTfX(-Ac42US^5}+8L|50!vkzH~)1XU|VSiXdpgTP^&9m}QgwpeelzVRw9 zI(YzbH}pYj-;dFHG`uVWK7onRNrh*XChYE^pA8rK^4bl?KDr{hlif`Ua%ts=W{HDU6=U)M zuc)KRT`j+0_8qBHQo!T(cIA#T*J1x4yT$RU=@l=n@q>I|4$mVele>i2D!~Y~8G1@^ zGW(BRmw9#0RwR7}mjIe)G+4Mn0H`JgF6XvCD@N`qI8Tq0N<3ZLvlqL`(-Upc~ z63)u!*7KOKCIapUFLAxDW5kdOn*EMrlGo_ZNmV{`YcBnl5?mxZw?+hr@F_XY+qzoZ zC!-ib_-(1+^c9qM9X*&?iw}N&n@j4k@kYt_4)KTqZ7_D(2{?Di`kLzF-md8Y>1i$d z6=D;m!qL=z?R#>8M80Q>>UA?5f#=NN9m0T9vw{$w?PvLq$z3V{BqSj{m^1%OG&-jmV?M`ZNVep=p9 z+h)&${%;qgN_+*GjD>nxuav-tjQYTtGe6Vp`|586G;~h?XJdGeqZ#-{k?g?trZx&Q zd6e31IMswOI)i;FYCyMn?pd)r#lP#Fy)dejZ03B{?VH1XllNnzUYoibewQTYBg0^8 zZ)CUE{8gM?qi1gU-9fQ_S_d%P*Z0JqH7@E-_0*uwP&9%60R5fIV+r2N?k$Z=Bc%QQ z_?_wn>-n;8%!}?Z0uZTJS$SKI2_mgklIY*b9!|Eg@qJl2U8PLR8exw&{D!asm$@HZ zTGI2Q&WMmEn#%B}7FR-w|0{3Qjh>qwKEpalh3emIBCAZ9TedeL>PL*F)`!DVGT* ziRs4=ozvvN^yLG8pPCAWy!<@-^j+fbBF|ItzB9^pr|w**O$6;4d_m1uPpb8EBG|&b zQ)yss9*7z78~%@Yl+*Uu?}AQnKMCP^b)28U5O$_hn3YpOGt+coakQ?|yyMR7Wq7nJ zRoxs zId-#6H2&)L0I&>VRJQTLXv}7JN<_2~W|jnryQS1Q%u9~@?nDq*LA5BdwfP2SgQ(@Z zuc_Up!1$xotK}RA0I6j0?JEN!%)N>EYPj!-IL?aF(ciT?xd6FogJ}^O!*Ci66VP}s z%*hPxwfX_Lu%jp9mZ~>&%*H=}ZCY3eN|_{8P*27y5S>OdHJ{6U8VRYNWD(DXb^A#N zT!WZB!EA`t#suxV3c?TpV4$th5A}>F(Bv2a5A?8zMlr(+W(lbT4=?g=KeqM= z9I|f5*>UX_b*`3Rvh~C;m=?kY{E!b0BcLAGBPPnkSoennS;CxrM&!733EmdX8F3%& zC33WK&Y+JiT>_%c(^xFK7k+qK$VC^IaRi~8f%}>;gFG+IOL9JeG?HJ*LK;dVe~<)S zP4U`CJQ8-T2W`x!PG5>z$=PS*`;W_EdNYoW?=&ajagzMYtQ91^W-MGtM;m>9m+oQR zG@t=vO){qIeyy+vD>QnoLS)^jmM$3!2MZ%Or>1@@vlSGt@qwA9;G6--m6c+B)y{;2 zz?;f@!*Sh&^%mLtMBQ`|Tn8byyhS&Z%*GIcu;zS-cVPa!&LF`PSKh;OS}c5WOlLx| z=jVz%!MK^xbWaSt026^v@pO$>kO&flucc=e*HmCta~uD`RVXSzF`GOhz6MrTDW68a38S?WaQ9LH$JwOiAF`O&yo8oe#&Prc z3jnK1%2-*hW>|62Jgc#ECS8i$4qze(UJ*9g`Jl9Jp5LtFS5|t3#gc+nOK9PPBd8+p zbJ!g{?s3JcuOBl+uCpX~+p$yr7B;b;KnMHrJR|J?1U{DXa(&jz^jZ1>#(GH_!^g$J z_(yXGt>j3pyEjQT2WP1|tWwaxS&2ZxMl9~?3`4AR~0<;)%hIKU`^x8HQwCt7b*AE5AL`#cTkvk{-(MVEQ7URYy6xNZ=fY^6<^N#7mD=%4 z-RlD7mKP-~}7=8cHr%~#Q zd$A=%?otx}4~I6|k-Uoo3v;cdMJ_%1_JVjoKp!U#t6y&3=VTE2^;{Y!RdQjdEzl1Z z1G%)1jhf~6v5@=v7ys^irFjcs@9yrB4^5kw=Br=0LL|8~AJ4sSL{802xHQAnN<416yPZmUpx|%VZ3B6z~j6ls)+B&It$pV z?KbH!QaQztkQI&Kbh4%=2l$~KhDo6=BJU$rp0#rFNBZFehDFghj~0qiRYY+iEJg*Z**&LHfm8^$dRNJNpXFF zN1V_ToeEN}hd<+qtD93t6K4JaWe`~&5y6c1i~dh9X0|aj zbtmpqYrx@A3Q++xm9ag3qaq9(-oQvcB5Fb?Jsv7;r*#wyVH7#MM-o*6rIAI=Ho6V? 
zNRJ)wycZ;l_@|Wy!R9C6)hB&hk9}fj(3M&)K6ps&2%7C=aRa($>RDI+AEHZF&)fUE zF?=+#Zjby6QR#}X-z}%B-0Ksa}eCI%{A*6Tg{_C@X(9}-}R zbD#~KqI(@-0}>O9Rp%Lw4IGkTM8VlD62nWt*y`M=dZ98}xrDkx5>DLV5#)I#F+O_~ z9U?f>1NVowAAWnLyZTJGIFit=K}#qNZRk8ql}eK_Y$Y^(jO)4@NH6<6!Qr9krjUYQ zn&zrdyE?;i6_~(-Megf8p!gy2&C9Z=0>|xZjRwCb5H4R%Bp8xsT%UiE<)X%J0;3-8 z(j6*=52mdpyWSDeXE4E06xYLzVL>aB7iEK3gcU^|sh_|{Lu%!~@6Tix@2<|;oz(P7 zEWB?BX0$?w->LH}Z+{!;Rlh%yXe2yyfqLjg=8=KmCz%cbBy6*skfj4|a*c?7OP;-x zAHTihyycdHzenNY0<>ctwutO>;(+w2@^&;4?GOf3SNir*9<0PSkv$i2SXl1768k&s!>s0g#{!-g!V?xvj@JpV}K7a?Mh=vH;Mn zKw4y6<;D3OQdLiOUjJ+-7*k{wR&dL=4wLKE6PYm~9f<$PB`)y_c30Nj2qVIY^eZ4O z?X{Fc{@qP`xkuy-RL2!{1Nz&%#X<3#k_Lywi^X6KAHXPG_(v7wF(o(!I)d%0vO<_w zVx+;|Cqpz(9QeL|5sU0qz?xPX`@>E~3Pmiki$mTQr1nU5WHmX$!cDI<3C53wj9Tr0 zAD~zl$*lsDp$UZ1oONurAuf=p4K~`uoEt0R!Hi@3^(EN#5dV)fqmaU|g8xZbpU(!K z4suAsv^_rtTaG|84PqRj^(&{b{qV${`@^lfz>~mBFxzv>G>&nO;R9tWBn8>@A3rLq z-SI&yRUNx5Wb;P$WXSj%?YXwWY0z7#;dy1sSCS+#c{|}iD9_eA7H1rLI`L(pxOj31 z)8Q}`s`S^NZ7-!O2^e18wBOJ@3qoUA4|G2ANJdu%1S@CmZ64Ml*Gbm zVNF|?07@Go2r_iotlet7!>nSbue2(p5-=>s5X=44S%SBcw^p?74+IAM;1y@Rj($mg zw)J6i!3L5-A=3f)%A|CFTZzM34d=X{V5tR=_HxW2T>&w}h{Qd|bk-#|ARQh+*OJlv z7Q5x%hRje(^d=D+T+B|;|6Wq2{2~>x(=@@E=N=okg?5CndF_9ja-Je1(=maF%y`k-%#%P0loDgglulLF69f5aEG;dC2ya!hLqbifMQ zB_+mH8a_Ctb|%{oV+^Y8?$*hYB?@;w+_5=vylN}Tv(SgF4Li52LFrHr25y8+d*W5k zW(;kI$mcuDufUOYiYMh|`3gdf`_nE9{q|W%82F~%X}VwFc7jW;c=}Xak3|SGHk`#BH+Zy8 z2)TW8sq0_JpvY;Y_uN*MBTN#MgdNg8-`W_gkhA8#`Of!k(yMn?e5Sxy;O*Oyo%(BGuCGMhFL>$P(#Nn6mpvB2<2$(Z%EBq!BQgkoNusz6$oUCUqElW2 zA2fnPc`w=+)u%$rRIS}*s!S^pJN-Y=2&~MX4|c8(8cr@#%0S6qHAnirop}hpeW?-$H99iD2VWmvQ8ngLiPYaMU_)_)JRa+ z9h)rM!z5h5Gf<17YQ7LO7gSbt1H~Wa)Jl(S&XC}l^bag91r6SfZ&mnSt0s4RN@wz1 z*Gwx!24=8gA^}S;OmgF^`X0xA+pbAYwp|tur|?-T8;F3pJ9k@MXo9W3(V)KLp&J8^ zb9%T&JQNWseRz-vwJBcBujzH!RIx`+N!#ax*0`uHQB$+mIf*!#v+e(_jADl`t55|e zzE@VcQ3-rnz%7xr5$V_uBo33f!9#jen-Db?=+gHxmxJ?bk9Pd&!;A%ZiP}8O%JcjR zYO!UzwxD_R9dCk;Z|rq1qRJl!ZxA4}%GxZqzTlY?s54iJtbg1`*LX3;?yra39cC+5 zJADn~k>!a?v@NKSBf%d|dix-79!>lG>V)@LUG0n3WvYq^ds)<_`_ixU|1FT)-0^BJ zGJ7UC|A#!`pYIp&OnPs*N$_>F&Vbf9?;26iv-I@P%z2MRa!JMXOfvNP2iq4OItu8&D%~yM=Hz=6|xxF8v-HYs?m~rJ)%W;niieC%w9oHY1tb*eXw5LhS zG<0yZgg9HK;je27R9(+g05TEXLNIAs-f|;x_4w(%qs{XS*UYDPUQ_YK)5AsIL(M zj&9@QC7Ao!<(OuJV$5P;F>t`6xX!DwoBX)is?xD$9QZB04sf&>zvT?sL+thA=0g)h zj(YD6y`(@>ute%RO<1OrT_hve4y^(5Z<~Vtc3IU>vH7bD!-qd8b6Rl$7YspPaRa_U z*PJ110|S~tg2!S^<|)v2H%+R)9+m7+utqt)SP8!-z z#Obq8Zvx_8&0uOmdXHGD76x@)S@=ZczI{rjq0-pWLF7)1@i1K-%-*)!DdJgg(l5UM zO_U+afNAMtHR8ao^F?N#cYFdHC!Yk_9`07xN1ZftwhVD3oxX&Ed4jqc`1xdV3%!hw zqPXNWyPbfHW+9=_`+ef9@t;$SM47<(gL;5!kMz{o5%J;VNZ_<=#rx^^;{gkkP(u>& z`GJ+Jp49Lnd7Z=VJ)VFIx8hT7@+S7%5H;(>5+75={G+BGyeFCFTFE7Jdnfl9g5DT& z6&9@6zy>h@o;o=o42DB_A@SMsWZWXON*5urtOQEZUQd3NpF9o@>OkT$dJ9rX!~ z<8_`6B8Y1kzh5>1*LpyK)3V+aUdYiSJjojPNJV%KkmjLC4GBjPLy(u(#vcjR>@U(SuNEo7 z8iQx*rg^4+JfuynD zj{BSKoj^25f~#YUFNzgZLSnzJX!Ct5Tq;1q0IQ!weTz5pBkd-5i7UTxy4a|q}%KlnCPXUWt=WToDO5}&*gJU(I&ECTbi z#5wc4uY*a=?i?E#Kl1HKK&6k5pRp$Ji7lUQ6K=Xbe!U(O-Sjm+ zMGh0yp~__rx^Z>KHb;_o^dJ`KX_?v4OIcI}QEijcvlUJbaXaIC+R&0)QrWdzcrWXf z*X1qvI{%2oePZMf7OFufD=!b8tz7C{16>y6h8c;?Ii60VD;FCQr5WdfU5mqr!uEiP{52kP;PcE>hs)tbnwTp`1e$C z{PiGIceO&{w~C$+BR_xC=pb;g4_nfrx`hnFmusJYRma2$lsOUik^WZM_m?GraAaK`r! 
zoqhno0-O`vk7gn*t?`Ndhimk-a)~BGL%AO(Q*S7c#)3J-RcyA`LBMGbbpVQ3y3}Je4u|)4#$6{+HjAwseo%9l%-4f1kVXff4i4el9#cJ&Fc2 zE2u$}`y(GJfr!tZm^`Q%IIC&-*IF{<&Q}()$zFlzD{I!rW%qmX)B-twdL)AZ5S^QV zEoolobph-g^`D$u;U7&vtv1Nv84z`Hr+++s?B*R|q)HQIi@*YO1=r3(;@0Xk7l$Nt@vVRQIOm0m#?iZiBl0Q z9_+KAR?IlB&HQ6JDZG>2%0hOe>uOOeuZg#(yNIhk>#+9j@d6<%N+0KAC3z~x;TnhF z`p6ZvxYuLcpxRSdHhSiZuVyqEhCJvY~s z`Fq%-sL!X_+a;us#aU4c`60x-`a60P_Q7y z%}jFgo}MybCdf8l6#wJxRXO{kdj9snty@&X`1P0zRORm&XWT#wX1gsD z*AzKhV7Xu#5uvJWNfg0~2!o_dsoK{qJwUEElJ264bI&d+E}i@pJ{F*~axA|Lv>hs) zz0;qPNXRdTQrHajrgUeC))s>jW`L|Kl;l!(hH^m62^c09AT zaQc?2PlpV^?$wN`F9P%4o==mKc`IucLVxA%en|(3 zHfKxqQSpjCUMI~pSK{Yb{^;$Rt`6(xh-?~vIb`t8-L^z-i=W3ZeqK5T&Y|L0&F7#E zll(1q8PAb4z;3uJ@0R959pB*x=^hWXY3A|;-fyA@lu0Llio0+&q(c{$7LShTO3X{S zm*t8M!~3Hp1OYtS7jN&#fU%9k9HGHrW}mHAtNYoWu)iWOP0h4qJ&YdWizzBRJ7+If z47hrwkyeJS>q=5i;@wT6!{>qdVoa3C7RQk3-;PgV>|RvQoJRA2_{mpE+DD z+_C>in!d{nnJj=Z1Z>eq*-=B8p#gUDGg3z3sF1Q+N)IpM3aB;SwAdi%>o*stK^k0evY`K)y4fACw}i#t>Y?KgN6 z`zlH!@n)unr0({YQY`Edu$n-{{u_Tt+WgZ?m9CM1NkOn_*=G!55iUc(ROl zO`G!;H&zlXoT)g-m4CoBUIWWdWAXr}$#%*13)^tX%$is1oKK?L%senc5KRn>c7&Yk=L+ryDG_r&kdj zd()`ZD^z~~WGsBLf221iQmtahrqHHy#|q@gnTFc4am=03h4crj1e*;czBrUaayD?r zi6t(Rwj;vU%SxymsH&$py1!m&`#ZB#o(OIipz1WQkDqXb;M^lC&NUK|eP;*Eo-^)WChMWE>{Bne0 zdlDffBOm@1l1E+AD@(BtppE6U_^*b3vWpBEi{ap)Nr1PhuX0RpY%=ra>jGUu^VOiI z|Ha-H=&#*jDWk9N$msGfOE5nuB7i#EH%;LazVz%E$wTOXt|#OV!<{MnN4}i~g4JmK+uu`uhCea=>hT=8kz6-Bs*54YL;Vmu?@ z)93eQouEMeY-#eK#rW&A#(}!w zps9gR-{oKEb#=po6&Z%cODcE%!d*vozR!J3_W7d5X=^XB;wI?j$^C(0ug%#>R+Xn> zD`?_T2jG$$j*ME*#)BT69pmS=0}c>3Mm9{MrH#N=v8Kdaq7v1kMNtbChPMOw>Zc|+ zhAdxX?|A8iy}sdVnG^T>CjJXAB-9`+GiHrGesXddzA&Z8ix1jwAd2WTZ*ZclxC(9$ z`3k}LZJ;>xVQMv3XmMEYxfbeyM-h%yH6C;-3aZx%7P8Zu=}_DiY5x2-N7Q3}Q6MH9 zB=(DbGMM%oKEEbkL@J-}TOjg2rin*+WR%3u7FXPc-U-Eb1ako=jmj5>Iw5qh>({H; zB<_h0VM6V|xWyY0sLNwe+_Kk34<{MFY4ddx|Busp>;w$*L;DQXlBJ#O<$Z6J;z~Y! zCh|mL@xgtsR*kuyS_kVIsHqpFf&kae!xtimy24XCc2RB%uD@$iqQ2oLI5TuZdqdiP zigC;wqI zu#3Xu7Q};g)ahiP9S7#LaP_j@hSM9p8_IpHFn1qaXk3Gf;-371fsqCqRuE& zG87Z$x~26$%I0D0n4k$DJ~FLEdfv%L5A>f>eKVB17^r30_TLHfQN3=_Rn@^|J%NfV z4th0J{X;t~jYB5xXG@Kr+d%$uCwZ=O-nw>OI_D2T`{HD@V}IM=A}Ib?9rOB-P-o9Q z#-sf^#n(SSxKEvE3@#XYpxNs~`~27OQ;$c!%J>h}Z0_Akj@E{`a_Y|ea2}vP&$LO7 zK0aY`YQMH{|9K4c9}ly|^(>njqgI9=*ZcdUYA4Gscd<)0{9$?jLaNnHd2_ZJ#(xNp zja~;ApT!aZ^mHJuy;`(X+*4xjLhXYOA%v#mOcU?fr;6~`MgzCBCb^Cn^e=VEZBD|e zm_?iXwS6NOKc|%*@w*z=UQ(4tMvqK6Z5i5o(5Ea%5a~`X7bRA*>A)EZDa(o1rxPzj z%`t*bLj#f&m{|hJ*mxD*+m#l??MMmfXTL7&W>T{s5&OYrQY#+aQR|}I5 z$o=+1F)w6dv1BIFz*K)LdY;4#XGr~b+JS7lmaauaaTQ+f)$#v9nr=OFCyT{tzOmMx z@RrLYS7PW;2%ny6CktshSmhwQ{(%rbhBPK%b**!)qdbH0URPbAKW%nCUT9RHU|-42 zds{sJZ5Ek1e|N>uni>->Kk~Z8m$(bgZB?L*Cya2y|S2*UC`{4!b8XdEyavlB*v4A;-4li*&;t z9|2hT3C;`W6gS<0P~#dq+o!b?z6N46?{f18*!U$98(zF|*Q_|@@pl$di>()UsF+yi z6>7$hLC|lB!0@CR`3&M9cHhdTifOCU1%v2TU?K;%J~~2jmX1age(S}R$Pqsk-m?PL zIFFotbVj%b<#~*+921JL-}XTlKo=33YFGW=3dVP6Z?b%+xnzn>z*&7jM^g!dG%+;@ z_du72WYZIgkMDGfo!Tq zpVI5X*&6j#p2Jje%I$9omzR8wR~9SDV^S?egj!8NH4{S&1io?~(qmoSQ%0tQBlP4S zf~ktdijh$6xeo%Qj!&bm2>qLw1#uW#d3C@Ep%1ZBwTR#@zuewMMk9#S?d@=7A;Tkl-KmKZmeY5)D zloOG56NnRngvS-?+?$&YnsxS3GwIQtpeiofJ5I$4$b4G? z2b6Ckbc&wOyx&uGLXaK3D0;B~y5;sUS{HrqstcVprO}F5{0ij!({a(ENO8*E{BG4d z>8l}6V`)2gz(T_V-&`+6UwK+hyB&=g(8mITBL4{vjN!FYK8;$n20CWIX(cBpm(k&C zulF&3tx1E6!-zJ)aZdb2*W$O%)r;HG`IZK3FltNN{*2S>aEE9SMu!4_9$TkKV^}<; zjuerV`lD^%$6;SQ=E<)KN|-bxZgZ??SYUyuDOZD712ffW#>?Ai!(K zv<U4KJ|PH;(<|4^dL&i3+&HH7A2ymxoYd-v@^n zs6hBexLTROTlOQjh@SE@QHX6mGZyC#O6e`~W7wx&^OrjRLis{hk|0tVPMw`#y^_up z=*%3_<9c=sl07p>TcrOi%aK-%R$vhK6`|Jg;u+r%TIMP5EwdIhm?*;xThTL8NTvGU z1IZ>{hCnQ?5Kqq!cTM)T>67=^o6iqiZ`t2M+ugnSXRD6EPC_(Ye-?86? 
z?$~)1ULNIOz*daF`9tWw0`5-8n=c})FDRkdm2wDOO&6iDLyVPXw)V`oWBFHzFpNS% z)XF7^$+wW$MByRGTi_?mmP{{FddGPqEN}cQ70hb~6+MJo$*g{n70-stD}R8h}p z&(F$};IE8n2zED1a_M&hUd#O4$bMBu_gaBZdV-%}9>Fe-4?r$RAr!PiAn`0rg3B;E7tVDy*NxG(`UZr); zb~c{{tCDhj;AKXexOkG*P@^pfj69s4*7AVblwxhMw{t0Gyq_jY#sKO ziw}Eb=@I|teHmR=SS|sQAshKjAZXP*ni@;3$eN}Q>!wmY)jr@Ubk|8iH3m!N?`1v` zP`f}eOzZ%a&zf^w|H~}guhO*Mt(Fe-e)mFiyQfED7YJR_7X}LNoxZ^Rxxm`unBvfW zJIoYdCB<<#YQ`gKFjBuZGU@gr7Z&E3X+{(skUBf?BA?IFVad)ibbTaZNW2mN#z!3g z?v@Iq&t!de>`2*^vM8+nk1_u2JS60WkiS|u<&kxMF{mG)%DRLe;Nqqq(sYj?Y_+p* zzO~l}s@$=NqFeC?)(XSi@1~w_;b?o92t@0UL$n5cz0T#z_-ijQf|E?E7Hyaogld<< zcV(6zo%U`rQs)LfbJ!V+ngMJi=o`whrxN@i8=HGT2ES5v)!df0jntYepf<73dkTdV zKXF8eJv^p-!vnPth5v%NbE@FrG-aLrBf!K_e+A5(jY~%~7bwqri<@{GW)nPplP&oP z{&2svAh&=WH7z|T=vO5{i7>Hb5S85<+lR4z~z z8l5hQ)F&Kh0BlPm)UDi8rnf^PjaD8(LXuC^6(kg027Bc=wbT_LI*i3Fi4Miyl`%Cd zeibVIt(AH^t1&S2XGZ{ZEsx9)?c~aK=AzI~<2TtR08*;wYTnINeRq|Ux+Qh+ehV`Rn;|kFnQF_I^3PC`=TN|2@0$gq$8U;#7T1)?70TP}KOmorHx-niF>` z@wFpGu5IHjdMxDWY0c=yyqU1; z#1aPj3il2pyF}6);xmFF(k7?dsY;F&ej{W2q z_|{^8@3o-~uq}~F|6mwBesU~1v(#-X{Q(wFy4;Af-2+n{fN_idN8J~#Zawr1IE-7* zy#A^kSn|%!=Xq6)Q>k+BjJ5k8kdgbDbG&u)Z3xY;UbUO>Q705dD=Gz!`DzT#JDcX0 zUFaN59yWaf@mYf6Zkbzl-Qgua;_Ca+yqfYp1*GvAJ#NDbRB6p8v7tIU{n~rpNGtEC ztHfWR^q#$~QqsAHb03%b7y1)(MC2k6En$b|6Y2i&{2g3jiG12TZKdFVVB2g&)oLBr zE(>8OpXRt2J}@c|IAzu#>BSGy6azjCTGsYrh)vHE!NacB!n>gSsoB@IB3pmPGWxUI zQbv|_W?1t_^TY{94zGxg0U25>`E^A$x>Cht6EeXs>n9>L3IP`A7CYm`X`}m+fOY8> zD(?lG|8}^7QPXD*7yS#s{Yb;I8&d2JPxO!Flw9($>0tqg1MK0RP3b3ZUXtW;`8o9k zVyK7YFCmad-$86;!s>vKSVGK4R%*6NO7KTM*Z4UQ{_4*=v$qt(etPcZ4CyB&)Uju= z7__Fs?43lLuD3t%pO%SDpK41tOxwKhd+5bMbN(1;l(~8{c}}iGZg0t?Ra-_`@y~+) z2Q_Xmq>_tO%s)|`P~9n1HU62IXp^2x2?xCp{lwmYf9=+#Qyjwq|P!^ z48~6I74O~^&D2O4JeR*i6z-e;F2UPNnfcah0sx~Ie7vqjIDnk#31+m+b6=miL~O6o z_!9&^oB6scd{sr*KYu(2kO^gwzi)4NUUTQt*J=yN+YB#SJctI0j`5C<#?Np;I>xtEV8E z^(3iPoXrt`zutlkD{%k!u*d+zATnErG`$m=WM*12}7brF z?pRKXDPi#!e*hGXS_y4{s9J8Xn!^?D>HingU`OnpxmSjkw$G`B@8L7vaVY>lCmrcM zcQTI7(h2)kespnfAOg~o`^Gg#Z>S-I zwZ4w;i!~i)S*Yfb_M1^>&7gw+K?-OJ38ZbK^pT3A)Fr_X#Q`KLLdlEU6ZV-X z$fu7AE8H3-QI$2O+l}K{NTor2-s1T~$j)vZeSqLc3EblAfM;cYt1z1W_+C!|$P_n* zRTVoryf{Pyy(aIVdeGbIT14)c#fDDyJuSOoq~rC?&!Arc2nrlwQ}im4K(u*-=A*t5UD=~SjT+GVNX5#rLR!Ptl{AWDSF|CodmCO-KNnpF z2e}X-lM_I7ZX~0yQHzY2f1$#cn+h_DT;Q2i@AiqG{#yRN6)M~W)_k10yS_|9)W@O& zReLwoUWR19rz+@Oh*$~H(+EE-Q*>Z3h59x2-Eq*C%&w>zR)aLRuKx0GTZm&dy)2$XjC%aiCE~6 zMk~`mIsnp)to^e+f|2U}T3L($CFFerX) z7ftIAh<{n$ihUo}@*fsI>!4oIfTJ1CHRpnbxE~%m z)5rRvSr$8iuoBZ6Q8$-ch+cFH+ZMZIeJJ7E$cmHzeF5#H7d`1)F7YiAiOw^ElxN-))cm3&M}2}l4w~-) zPTwzGmaUX)Hz58UK)yqyiP~fjNPyjCS340BcAI0-Z8GuwkkFe>ad^v z!B%^ z56q|%isSYlOIvB;N1>p=ZL_~Qu(OJho?FiK*&t@TO&Fa#dWrMdx4jeI)y570a@Tw& z&wjgcXB!?hSj|Un=1;P^mK8=6wbM}AF4GU!$MfxeHqk4$>b|qE2&#VHn+@$DcKY>t zNsX6&r`OEychIL`KAcfmUlZy=uS8Y3n;zkazurewxVTI`xNv%y(*wTb0Q#_oFfts4 zm%&ZUDh!!V*xgD8yXv_f#S2IH8 z4nJ!B_OYNBeh%2v1+UQ2n$x7KYlUpiP8A0bRWao&Z^t9HmG$8hQ7k)rsIL3s`MgutEytDI zf3xChiGsGNaV$Pjn8JGO3zN-mzoQw}{T74Tuz$&m8 zteWTyy|Ittv>(8IDe5sDP!;I4RY~d|jUyu{fC4)`PYfju;wLqIG)zJxvcRL!k9%mf zBm5)YScwhGlRxXg1@1z*5O;bBwf`(slN6#6iA6B}vx-LzzKd^NEc9y$h!s27AtD=j z=O+j>A^jrH3pjGwEiRXu3oW%0*4qqYz}e~W%idyZnEkEuQ&T3F-u`p`mhC{*2NNqd zlPS;)pCB8zZIyS@k#v1qUu1rpSMxQ|^p)j05xizmAwkIp5yZMr15!Ce_Q;vkqv!qM z-D>2FMBW8D?2l5hcU`hvk8dg$5IvQX6yMERc4zm6xvZ@oB<-212R|=@tWeSI72fuL zssBQ}W`xq?hRJ(rE0k3*Di^>d<=o_S4Dyr;4Oq_Dv89~8C=21Q_`~lDow2EV>)reG zL5z#&8H#s~X70*|0x0>TzD}h)L!vmueWQ%`fQ8I?tg|k?mdKs;2qh6}_|YS!Eh4a!=4Bq@4cTK?ez`JJ-$Y$T zJcPui0ZIpw&hhi#U9&eww5sR|*>i!x_qOC4MCC{isEfInto2Y;&{Bc++mWN#Y?=4T zV;X?sm-EY_NP3*1K)E>~KN<2~gT6Nw(8k?IOmAn4$FmNIDJc{^!fa9`;`cpW$d}(O 
z6)LU&r-{Ws8|x5E&{@1Drmc;ysrt0P3NK6vtRztM`aKK6S<&gIxBjrFvx&lKkrpbp z6XD~*N;vw*2yp?Oi32)@j_|!Wq4LHQ1yNz0K@QRZ@&j<5>ftmw$8kT#`!P}aciA;D zqX~G!T$zU9)3lF_k_+#L$8^!(0kt9x9~dvr;6CECz-Uu_M2P}>$6DL@a0VB_auQ@3 zLlS!E)@z{1alNRL9O ze|7=lN;W8zhPf@%bu~oaiHGAVj@vL(i0~4bNJ)PPRX!%L9X0*wlhZ@ZQ_9 zw>xs6kKWI(WpdBJ*!5x@LwCf*hC3I-fJS9&%akIW!}-sXf4!=u9DZz1e6NS_nc%c- zw!P8k*zLcku9PX^Z#xg0o|!;|*c?1_j}Jc|gK|q7ccSuHO9CtF{*(RsT!JRjVDjp` zbi2{NkSN3}#G^swK4&MmcDxgF{}B}R{TZt9L4Rwd zfnUhb&W8$c)oC6%o_YS;sLgMjF_)wzplBa5FO-m+`w&q&Q<6GVm}vu@S=C5^A_mtl z*5l%OzccJD^{^~h%1r|3Q)F1f3+ta^GVGVp z82^$gK%;h0D{}cPO+2f4$63HTM>p2>X@;o(%!RODh<*@s%iKsVzV5dLf7ZYA95uQM`gEQ*C} zla624Dli+$-HM5IglM%`P7-^Sa7@18k0NxFYk5Z33*wJ)#m=38zHtC~PcZi@4RW{_ zUE-Q>0ty=7x(~it-U6sCh3DE0iq4hdiDADcAU^pxTIAZq%g~yEe*I0RBBT%=9c(CQ z_x7YUK@5lG+F|iHjEX63*zC#`4ET6!42J`^o|7zX!&F@+;fm>@aE;BJ^RBf%TLYtz zwOo}s$(Xy>QfxT2R2hoKgB`Npk=%(_UmMrH2Rk;zB&|e(xSK5U#TgssC{0T9O2Dxz zb@Ch(FYxiQQ`8%&Cph$Q`bRh$lmovS9)#i(z$jWaQE+_Byk)54&;x-X^IhLSpR4&S zE`u91kjy@FQ1tox9~Etg&FRGdAPH_K_)|Krv|*P*=88i8xg}AHF`V$DgZBalKOLb~ZZ=DIC4QHoV~dMsdF z25reb@afI|N2)^9ROdmBA8Gm(!+gUkM^o?sXN|P}-AkWp8#ps&{M)7En9ylf2M+@S z#{56r*H5P`P;t`b=HFkIKRC8wkXQi8p2G{Vma@@YXI=%$^)p*tGSB|;hihVv9`?nl zQZEY9MnHE_UFr&+th`A)SU;3z(=d$e%3cjn>KfYadwarN`cD9q@OD)GmKNW2dW?Wl zH6;?Mhal_CJ0ji$CeIUs&pw*kQe+9UU@KuUL09w~Wa53{X({Wn~Rl zn&kNSf>Ymu4^0Y=Bm?RD%*8_ol!Bf53syx0r{}7yxYJI=2CE?1>%5S$!>^+j6nE+H z-!bkf?{t+zyR;R+JQUB>y4H9fG9Te8;aKNovBF^1mRjB+U@+=Sy zZy2_sfumyCz!#w!i*Wqc^pa4jojC)uZF&f52$Adc&M^@5*2B zU`wmb^Act(zeEqWwF>5KfErf#)4Ih$ru~=LsUPyEct8EP@!u9gy60B7m7O?d;LM-r zcUd-Gk}Cjugf^pp(N{D(d`pE>C~~$PqYd-yd*^Jj5iDLA7+&>c`z3X6=?1WO<39R1 z?r9|G8#&ADoIQKh^>!jK+T+mAu1=V~dT#QuN#T9nBrHcE;y?DzPua8QWVYZXm$&62 zTxa&Ws=ye|@>>gy#FKqZg-g^kFL`4qWBdMDP+D_qFbmOe5B8h=vV`vc#eT`l0e2td z-S3MV`dsEY!`d~!#`BSn!S6rdazLd2GHoWL85E5(Y5{rC<)3rmZDD6z5~5UGz$-W#fjO)(c0zoulUQ${}M znj}-@ly1}X_5}8;=4Oxc2A`3#J}0yI1*{RDtaem{bX}eunU`u0uvOR*bBuo@ej*fZ zY5UiWb>d;MEaK5Iug=`nchszsT`@Sh@m<6^5OC<_GMb0LM~n>orw-3 z@5W+cUB6RrMJ$F)MFP8m-80)S)0XB8!ab{;Zt=mOTW6o=Lb$ss|C2VyXgJN)Up(e+ z`)GWIL;uqpRcj$cJ(@~b%%VQE3dkZd52oAe6eiL@yVk)Y@1!Et$ItZ3xZM`9Dj>Zo zdUmE;8zx%cp|(1&?mkQVC2^rn#Z==!a@64bmuMC@-p7D(1MNXzt-PsMDEfOl!a@XX zLyci$Mj>~VgGZByq0%8<;FXT_`kJb3i|-XbAEI*N4Ij2_eP?#>D>W>VE+}9*692r1 zinnE;T;v8{bk*#cAY#~#{@7eANXs&LscZ$4?;H?9n z*GPA@lPUGd6lBC#z! 
zKnHjFXwg_t{NP$ajN0d7BQFUFM~QN^OAE<4AT^p@5;rHh8=f${^4rhrh zTrW4)m=JoP50#n!g=jJbnMceEp=dj#nZo~5bRPaxe}5eR++pv1<>GR$%w&|V*+s6A zxHhFSuKA5zLTFs1r0e1uA>+!PA(<&FJA_JB>Kdu2ND_U2zw`YAc-(c)=RIDp=ZhEU z|2dlnI#$LJy5+pWmfqdy;1`g&oa86l=73mh8~CQKF5W<(2Yf8waE}F*U&Nn^xy52@ zAP7k~rGi8|dcF!)DYPzh?(zSi&tTNqBH!Q@$`Kfu3W`1p z_Pa9JPJQLI9qd^;S_E=EQ~r^&*U&DAmQh|av|s_+Sk}nx;Vd+(^l0|Ev~G?Dpcp?4 z^JqnHpSjKoaa@>Nkp>DRs)FUe@_N6@>YamH6<3$y0ZNB??YW6dC(&qqof@5V9p$xT^pS@@o;Cr$$!~bIU$?aT6O$GlM&R$IbaTg~CE06cvhai8CBwR`^_KkN z^d~)#Sc6!g7Tl=Oep@uS^Q#2I9CYU&I0lugl_%gL5hr7dZv1i;xW?}fBQf|VIV!NR z4tKU^u(xsn7#M%DP&w1ILr{Xs*J>tLdh)t)NwGnAj!FR2gK@F z_2N>E+#RhWEo}HFK#@-1?66R<{4)08pmJsp!b#Tx0-YAV$#o1B7aI7FkAqAPlZ|xs z3#g^=$@N6n8#fcoWyUo#2VwP0z-MtPK#G*mlJDc?ZmK^C7aUBUNTc7W6ov5`KV4d< zFU18+Ep1#cMuxCK)(9e(<`&Ak;K`ZNb5Rd&vb8ReWt89t5ksiiY?8TrmMnw7FZQ^Z z$}`ww+z}$|Ju>v?HC{crDWOD{K$%J-NYwQhmN0HhqW12%J7H=C(uI|=lyMe8JaRKp zg!+S}B7m8kcdzk1j^IGAwSgG0A92?OvWLoh+mf`N zaoN4euGKav5#x_$XUS95%nBE7D@N)52|JV1a^4E?l5MAIAwqNi2k%^>zB!(w(>2LK zN#R8GqY5Su-Ffq!4@p#mC7taY@$fBI_%k;Cj<|;17m({-X_EpTPrmXk&*_BoX$u%e z=LA?XCO|e1EE68YK4w-NbPGJ7Ocy{1!0A?7@4dF@spcM^+Fx^F*o)1>R>xtg;G_x^ z8(HF^0eet+5P14gE-AV(0cHl_s-AgW4yq9dj>~3>Xr6yL&yLQ8!i$pZKczKN(pv=$ z{aX^%9=c#^POl0Z0+t-pn}7cy!cyfRXQynWRj`T`dUTZX`Y{1fw!Ek@R(|>Sq?znk zjm6f)wH~Tu(CUlJVUdbjgWSr}jn`EyUY*6+Ym7ck*#;|%2N6QeQUUf-$KJi=T3uRR z22y#>pjhLjE$xHjyur6!a@`ZX_L={KB!Kn6{QV*Q0del0O*LsZe^(p z!bg<4%TJfy*x)(wUJ?p{F=4ff3n&W??2007+pdIRrj&v#CW}mi6e_>e_6i@<_ljwD zM3fFyMQ*8M83ty?D zNPTJ~f+j=|4u*g6T;qHSeDm?60#T1g!H;{K8S^($& z&T3ohy_28s4*u(SI-XtBBNzgy4}Et$vOV(er5cwK-(6e6X|YO>V0SR9Bhus%u zN7k97zm-Cxx4?E+d9KY~NH{ z#hIyfeXtw`;ASrUy)X=`K^M>B=Qi9!6zIB+Nl(kU6n#@;d<;m_B%me!^rH8$g z`pPxkb>|$R^X=YoK!{;}Z?whNbUS{r*l+eTC)X!qCYN-4Sr36D_qM}oW*U)e&&JRU zGSWzL8iLJy7uYVv49HCWRtvFhU~(_rg4FyFaWP3G=##NKVmuIvSo0-}v>>>zcLt0f zc5)6Q=^{9$z~k!2dJogZZ#XLyaWBMwd=sD3Og>s>8b-kXrAc6 zBR9{v{68o<^XY3szkvr?1UO9^w}=I9v)qG3!>WL!5&oapM;8O3yI!V&qqQU6sN?5T zgL{^EMP5SI>0CwJzfpsg5}|D=5fd6f+~%h4jBi#AbEO@6=7Gr~K>FvXIPo2owRes@Aw*`^At`%!QVmp;Yj7?R9f-i&;b<5_g z?63F^9qGn{34^%z#at3J+~?}WOp(`uLFo*aH&n5_n!L&(NvvpfJN{dlqvTd>e+VCB z79f1jSQ9BYcX4&F8|~p#DX5Jt3#kZ;eGiOf=S<%01F>do`kUg!)9Cu9OB~;zOMkr6 z30!JI-4z+hHVP)d&{!DVCLSy@X6*rM1qM>iH$q~p@zoG!v%n3NKaDo7qDDYwu&6e7t7vhRlGYG9+g)iyhP4 z_NrNLTlFqtAn?Nbt?eVI*5e{RC4(UigRClVk zIRj~#|5FjsZ?b1YV6EG8UD$ees><14AnTps^-pj~gW8(MCRTJ~Eb1U|w32)FEJjwJ zMoXEVK%Q#6!%0Od7JUvFk&GB3@y}1NRZE@n+oSg6~2gn0_?Z_vxAa&e~U ze^3i=4G!gv;Svgr+&v@y9D?LC214w%r0+knONyNs3<&}2H$5Q&ucY%8ZpR<}mX${) zZMCmqGcI{ggDu2kbjA5OnEOq34dq6irN{zfPX?A2nu7Os=s9(<`x6uJMC;&tbjRf6&0OL;jas zFG`d&&ddAxXYz%dzqch9?2hw#+7#$x)025I9T2%Jl|=UD;R@1-W1$y z-YH|n`^EnK0+&c|tqmJ(nr|9Bj(>tO1Y8Ji@gw1$12#MqODw+6BU}=9g~`2jtmDQd zv2~~JR81d6+;SXV6#7mMIC-lQ4Sbf5EIu4eykh8V5rAOsd>_kS_p=$%=7cu9c)xcC zSU8AC_F3Xb4K0=C$sY%O)qdZBKWk7!;88TcSS;^-|uufK1Vx7&EG#oB_gHO8ZYxkF?D?3$p83g;VR^e#~@28LvPx~>IWYQ{$TB5{$ zBk)47EQgkcb=sflyh_${-F^yz&yGPD!gea^(;Zm`K+mi1zqMt>IEVS;Fnt9>zWD@n zT4HiG6DNea0kuSV_rvq2+J#&GFb!1Ueabk-e$eK|cjtkdflwd=6&^J6t30_np<5d4 zvQ9et;$+@^vu77d{k+^9v4Kp_g;J~TYS=7{Frt5Mkg9{d!l(k^rzs`J|8D=OjAqh3 zwq2nFBr76Dpcb+e=SgK5c*w4tx+-#ORwHT&5Us>gulOp^mF}

--!*0Ig{xm+G2- zFR#dAPG7!7*V@Ss1X#FcwfLSfwoW_A|2}ssW_jtu&3p)-IVcIq;Ma4nVWNE8=xPQpnF#EWF8!;68}W8yAFv7{ijipGvk!3WV+4I5;5qUf zi@A%ksrvB*P83b>x$@42y^{Hvx=6TYekUJfiKbevv?wAyejT^cm^>|aWyFsseUf_2 zJ6))Uti)yGm|OQEj=0iB>>jw`4|faG6ZVoLwi~1i9zoK|5YZ}_nz08&+liwv?{l8xs9E(;7u)xGZV0+pTAvVJ0 z9%p+Y)9aT8Ho>GC&6`FLKHh$f-UW|;YiQ<37teKahF}aln2V|L%ctcRB9%~x+Ox2e zuipLE)?EV!+)j+PDb#vo=s0f0v?1jP=F{R&=&eWPGlCRH<=U6R{(P?aEzhfp;5Tr& z?L_be?q=OF{JL$DRu*@uanytVLDHfS1Ge@;2!zoiBPvRB@yq&@0?PrAsvy}X0x=tU z1;z@zUzEY5O7J^iPMCM9zCDf$n?5DYXNZtW;OTkf2@KQodWq&zcToeA@dEj%dn=r} zMxbfGzZyBC!PzDK(rj5-WsG2)ceEDLeU&CcnRV7|DOjHNZxm+#i3}eEqbygJkM=2D zBs*$@{)E}M7dP|W^i(J?paqTjq|(@V1QAAURUr?Ll$2(RUl1+O0<=E)>_iWP7i(&o zrecmhj#3SCAfM$e(rR7CSm%9>=_9z}l+N)fqW*1aN$NLD z;!woN^iUPqTbOd z8#X!cb9 z>=*JYc78<4NGFKZfGI@8Y1>y>HbAql(nb&f<+UBD)1_6JRSmbFwVpv&ZK9Z*^1Ro| zIrw7RQ;sb9(KK+p6EL*~p4;XSegHonXqG(ukR~Rp10J}(!chwB2n{K^o^KVvU9Aru z(ryqLUYPNvN-Za-Js_hq$4jx6DC2fVZX1Lw5MUzx^!-dYG13csNBIu1<%IQrJp>iT znEf-fXIFn~f0{C1(0xzl)Ky+o-DdOz+aTv(R_wN4utmkQv8QU3 zemCOkB>=n3nO_^lBcU_qJnRh9JSwPu3_RK^zdrqOTC`N5`+~(<0Vk7(U1PY6jdZ7C zz$#(kBFHpRatGp_Nu5E``>WIu(n}#XqYDnYyG)KEXCHn}M0~-|oqI-Jv0HxuPfIm* zYyhL`SEuiEmvqUV8g~)t2M}a$zG5Qn(py7p1Ic7AG#i#DP9PA%pP5&Y8G+UB7(0~jFuj%bbN0wMB@6yJ=7t1FK?1#rU zA7B@$Qcs+m7&SqI9=^g%X^tc7FRfF#)Fn~Pldbb7^C1W224giBMP0)N+azmM zq}NxyeZL|m&kyU0j=690>EjnN&4TWYcBey?a+wXSB&2qm-_?(*t4gRN6OPf=jcyms zc;At8ni(-@kY`&v%SLuy5=MP2O7a(ql;BwTbd|}&K)c_&wQ@ev`+LPEPC_twcrcn9 z^6)idleWNea2#7IDzUF_xOy@f!f6hZ)5AP64Con!zA6t5KUgk<5Nx0+%wSkpr){S# zre!?IoWKM}LMg@|7yMFBA~(b28&WoeVn61r^zm-WHw9lHON?tSat&cOmgaK_cV#(o z5?Lv|wUDuA=wrSXD`6k6$^xynYCD^_U;miNSqCtx34&+!+)dT2ODuV!$OR=J<4`tD zjuj`d^%AlQ8pw6K1KqSy$+Pyl8Nj3_MbD5wZe@LGb^}IU%(PHiCwkxHzcgw!1pRM>_At=A67DQXt`c#eu+MvM0>U3 zSa>OnATF9?Z~2h)G;a8M^r2MN*kSG6e#>1Z{Po$OAA^oRvGnQtcdm~u29Id@oh!$7D&;QFWJFN^CAGL%`d!@bIqmT}NJ`B}eWfG_5g+ z=8dG$1gBwW`dFk^YTrSLOySuW?aRqNOxn1+Fy%4FzGCJs&9lshA`7L$FMPaTs;WG^*i-OXF_E{0ua&A#VoMC>o#0%iU4N)y4am!aRj)9vBUZ$ZrHKRJ zE6BDci|KwAc#{!qVw;8k1PI-dJYhH$i+{n$GC!bgYcsazp!TyFboyx4bCuYv%`nmi z*-L|f#Up^lt^ZZRqT>xD5T_!Mg057~Wo{@7KXj|#%}3frI{8#&|FVZ*7bO?Er~!$L z;Jg`97>9_;GyNvdayAHjmO>#leK$l)5uTCcbdUq4VJTW|r2V=f{;Nt%nNYL&NE(g* zCoGrURJcUr61#k6rpGFKt2}&p8t1VH8b7=SAbDz<j(&<@ zWSBgT^0=-+u}w}|3ueW3%_TwEG^3RzZ$#wT#EMT(!>Ip7DXzqO#5euewkI0Q7qcP& zaEhp4juXn1$&}aGdNhRjA0%+G6w-PD7~uQoG>Fh8NyqOC;( zwBJq7N$O$F#&cSD1mD}KPUJ>%v=|n{4H+Lw;P`EBkR-q7nyI@)1J~Czj!PpmesA~Q z#3^lmAPpe%mO-&4Zl0-OmxW{*gxM8sdm`gr5RESMQx$Fnx5y=ONPqT6XlYdFjx6gBX@|^DF=lQvj(be}T|RkX8Yca^?f$TJuXMCYo4w=0Lbp6b)Q~XG5HB*v}tjGGUvv69pFbKjaXKZIutRbU*$R zalJUg!Fh#E&gXXR{`z#_n;&e+p?^E7U#l?H};|U^zFM|4BDdFhx zN#qngLSjwFnkvj`H) zDXO=YmYm$Y>%rg3oc#SmG_nJRT%QqvUjsF{mw2XDYVWiq!Oli*0M(0?uh zCaL??G&XGB&oZ{;g)#;aI?AmF0A?b#A$41ssf8KR%q!SVGcSX(o;a&Go0ynYs+4H= zN+?!W%k!@E)ygGW&261aseF*Nu_o&oB{xYDMpn7`T1R(-z)zT4R;0S7ZP=!-0excv z3%ku^BZRP93{dQ7gHn{ec<+c9Fx^^)=Gm~zJZZ5`Wq|vgxSUP{iYLa0#MUcYTkPy% z8`f?D^C}@Wc;J@V8$nx}Xp1AEDEX3BZN6C2sR$5=%vvYF`#96{7#g;1lr4$diTnH%HMznt%nwM zv@xQV-URZqWh+g3gbEWwQ*KA783py_MPFfZ$w;WcS(l5@Qzqpj^@R`WY}u*URPlTl zIa5KdpQ38=M6$^ZQxK|f_H?6ME8jfpHIwq5e4dq~r4c zN~M9c+QYYn7Xu!kc+$SvaUD`k(d5`1c#oK*N(?#3x9BcRPrnfBp1sQP2uNAH|GvAl zoVMp=Dawe{Al=vo(1*d>uCorYGDc3aZ%Vehwuri(0P*Ae27cQ_mwW!TbC%ZPIgAp& z@xtIP=$xbq{~j?vsK>~DJ5+>xAPI!kkHF?ssp!QL#6uWIW%8A?YR+m4vbiIg>X-f`K@~qP$!!VJ))47- z^U9|amkF-y9EHJ6uuwm)>eH8x{;9m-5jJJ0QaT^pN_REQM5(hu;&sl)-MHY`CzSa^ zSvL(#EPLRrfB{c}ATC*DtH`PCd@*|Z`_Vy1a^^CSTD2B(w{;B;h>1X7q!7vF2H?UM z(eQ=%8qG0Fk>#3O58G#XI9KdB9v!C2qA@6M$-UPP9)%cJJgUol($FgGQ&T-+1(}I1ty=4eb42j*SDBu@${Ao=*jsCFdh#dQ8)h?~ca~r8^tsv3! 
z_h>|=D5FT>z3_=$qGoP*pO5Rc{ag+9>*nrYb9VR$l1~!0BK0hL$w`g#&dU_z?q%?t z&&50va)aa=L-hUzr1uyZKYgt7XZsCR9F=t53t$ywk zBeBygRE5YWgUD-)^GKVeswa9oz%R3%w`BWk9vJlXhV{qe1H;8wAa3HDdaGp_-0Oc$ z@Oo6~pu}x?eyJjC67<+J7~@6uo?uv3rH=oq;H@#y9;9@Grs`Wn-|!b~RBbcPJ;czm zVxxqdXC$}Bd)TqRngd_qu7@&1PJ*O@;15g#?tcLFw&AkTz*mxzcm1?QZ$<` z_X^&G3@c!dd(sxXH;Yp;xBCUGlH*lUT@t#wSEqM!np-BZdhKCf{7gi zKnRtL9CSf-+!J=>%}!|^IOVMr&9poyfX7jvama)Th|BXC(XN%Y#*itPtGmAI7d zp-q37_g41}W6bi8$-J9a=iw!fc=aa`he=q=#GoGxXgoTUs4kXgft<@1ck;iHk zr*{vG*w@r}>lc${%8?|b%7eqhf~){o>Xy+HgNZ7|+^EnT(BB^!&rZ@+yLEfKR6lG( z$|ZTe5Y#}HJ;I|z?Jn$b9!+(^<gm7?MFn8I> z&c)2b{MNzz#!C~3^Er;xl7=rW^`du=B|g7mVU(`~do?1}u*lsoj{5d*m?ce95z%6( z1nby6=;N3A9Wy!5#C{M4^In`o+@U#Iz@ z#}Jn@hZ+Td3(`(lgcaV8-MX?{_V^O7b{?4o6<+b)E{!Bb3Wu0WGTGBwP>4Cvt*cVz zp&C*kJJ^K%M)FkoajfDHBQ)~566BH8(HI?Um{O`qMANozjvs^mKJT=-W2ilwPg9}i z=we%$NFPOvAEoPOxsuvISP-`0DVR&>WzBZ6(4hfaVmNUXlwidFAomHlk+7M|ez79r zZ7rM0-`+S~VEqz(IILBS%%j^L`>l~Moyt)bdZP(?TzF(}z~)8}Je?}xY>*4#8j=PQsesdioSritAoEksz8px?q?O>l z+@(q0*k>w}!(Lg!U!KS2ks#5_X)zkO7~kTS#ER6cMWb)BV^mI-qrB@j?O^`(r>|Xv z&wcU0oW=dt4Gj76^5Q=!JBFU{#7MB?2MH~=*`z@ zgNi4U^*-brl{85%f)|ET2m-ST|77=DI`$h592dd2Ui}E@aLCP;bCeoW zUW`y}-Qz*lC&rm^_-?!GsQr>cE}NImiL+4f(yNslb~8eY=?ccDpq3wgfJ)IV&YQiL zv7JJQo>CWC<>Yka_zqH2$S*D&nWTMaMa(jkx^s|Pg3CUc&(=kuh9-N7_p%wwENszk zwT$uTsWyG{!&ISVDQj}Yny%3kQ7wTc4qvu*Kr=pkrcologX>ayuJ>Bx=ne0=Xa4X2 z6pAJ3i@V`loJqL0%qx@t{0+9e%i^J$bd9L81^m3OR3cJnObL#37I-Q&;&Z*)^E)oW z37y<~(h9m({REZr6gsFj@=Oz)wb*L}rdIhxgGLDE)m6}`Is3?luEv_(KmWa>lOr1P zr0&@TK^yb@(fhJ*iv4>G8_6k|cM;FnAd?MAGgwknp`xLAr>zc-yBn0Jcr+Qr(6qWq z1uBiR7vH<*7OQ%{Cr;ow2%3;Sq{TUM-B8n+sfs05=qazsOha`u&F=Aeze?`;yKxXN zX?�{O}~P9-XChsn}@J4ncQ|=l&U4>4=$j9z3Ddxc1x#WOuJ=o^9yo@eEbb$DgoZ zP_ z-XFb|CwpOfuX{#^wY&%X5a$YTUb+IeSn<(YJ z{!%!Tvm`}rUm|nuc;K)u*4(eZN1MQN4dIr~&_{CkswFCbfLb+g>IFXI(Cb~kt-Zv}XEEELnkj}Kkq&tI=SbZ&V6w9E}J zeGro&5Po=7v+!A0gHe2``KmaB6dFzb*~L!9@qR5-k3dWmoY54N-fxv^{l$d)Ztv{=Pdz`336vRiq$!^><(e77*+b<7Du4i;tX%l@; zc3#5ha^>AP28-yq;Na$^L^~k|wIu%?gcGrk$A#)N7VH!rc|M2ZEyPy#@(%zh zh5Ag}>BG*7ocomZ+}aS-vBEag1+6A%cj(Fa0bW;S#ku3hFhO~m3msBX0OyjkAWV?u zlbbcn;17HnaxI4hLx)BKfHyMJW(OJJ{U(1!oO_;aA$^?S4>t`-bI;CcwCwuxx>w19 zxcOTW^dnl5MR>ElM%Y5$>tizEti#+7>omV~Gr5ACin9o=(tUzqb{8$Gs+_i*TEV$? zL(iWb(5p~rJ$cr{<-X8Y-EkC1F-YBd5Rizexv9^>W1pOhbg`Jzr&V{L*3$^0!=iI? 
z3z;ViuB*iKptUY{-GZ*4ft*{?oVC#b+nQsn$%2kr);4*T3(swP@fryk$%~`ifl)tO zR<1B4k05%q4=o9vyfuL&r)iDe`G)5(gpTVKIlX4T;lV0z0T31Wro5(K)+58>&VRhB z7tXz)V9yf;veOTG4MWV2!D_y~jEPtl&LIg*aF@q)2wQaCm;5uqrZ)I_ERifn2-dCK zKt*YriKJ~ci~e01{4YtI%*gRfl7P9-?d9#Ri5c9LY)-_GUM*=y15c=Ex~iJht>77< z5beeEAzkh@OkM*A_yCgOO6qkb>2D2^Y)r(m(VJbK01>NIj{alV8>g`Pd zkLRqCXSB+hY(@;c^_Kix4g>XJ512$0#U#$IkG_tiyh z{Hz?*q>dyH6m(v%fK8>V(SLhy(*$~oUHTYY#hU%+AQl!lWR0+n7V&&c-~z82SMv&M zXxz8xjRfAJUkcnQ8N?+)f#aMR3iAy?3K_yBB=}&>ud969kw&);rTXwj$*Q9YqJe6m zj+^u2-iXjP>|Ze(;{Dqw%y-8>B*>Ax;pO;i?qL0mLczwv3-Vox6S+{F|I8vDn8vY& zY!)2vR<`BFv(2Tq2|O6LVl^$xmD|ydR+B2UEB{3g5QTRKvM z0kL$I>F0`W7ARsJgOr^zs_0RJUw)Q<9m#7JrE7zO zsQ;0ANKiweFa}+q{GIApi2?W&h;HFPT~4Pzn-SyU)|}Yf`FsXKx0pDE6$psZna$U) zio#TuZ$?G23NohXCUG1WfZlEFo#aX)w(v(Kb}w})RZn_;0zp=oLa3EbgvGvLXlM=O zFKiQpHRcZMqE4#c%r{J3qPmjTHn$0UKBW09{nnwS$&`%r+h|xk2)pn9iWcVZwmx}c zRD%>s1K8Ehfm_C+73oDO9{zXUP zh?J&qV720gOv=Of+DwP`8vfX8s7EUI8v?Io!^&fvU;5ie(b0KP_9wXMw$h*AuJgzH zk|=q?<E0m+3IxLffFc9{lw6G+BLv$i;1h5zd$jxJ2FQ0pRt3+v&k+Llh))L7W~+3ZL8 zqSr%9(t!Va9G>{`;?by}(LLR+aWR#-Q)fABEuW@Z_P5yIg&gfaDMeMO% z!MgXFwj%`}soKR0;`09QEO6-uRr8H7VO3SMbRRhDLOs>i!&-SN^yTS*%jo1zul^}U z7?2Rbf90(NpBIfykiBzUx^r1)cZ)F>DP4yO2f-6<3C#BHu3IgebIk&QxiR4xmYPG# zm`_*16Zl1bYkr;O5)%9VIPpF2kHNP2qKqFvOXp>&D#1H1zT^=-<;Pt&Kv9U=tAM|b z1)&~z)hRLI4z(j4IPK^wVEl-SABx; zy#uV}ZU_$z2kIH!wET=}k2S3HCn$Vou1iI8@F(W=PIgmFwwMXf&FP(^Zfud);LLXC zd6$z)pg1!eg@rfV=QL+sLWfM6te@qAQQn24gE~kc#;mfp_D^%k;J3qmth8l2`0?l!m9fcN= zqt-$NH!`5yj1J#svtD_=v6GoP{l5F|>u*D&;#)>R%$FCrBRK_@L5|+7SUHtT_!a1n7CPq{d9R|WvoJ~bmzK;&zp`8+RF2mqOy7Odtq@AKm^O%8R4CPyACA(Z|(>PKRYMMJx~wy6(X72q8bJ*GMS=d z*fdwA`CiYLkQvg$yj}8SZJU-e&+b}T_NlE)lZsJi&VB3UCuPtNC<4_%in;ke^#yc zSWpVs)%aUxt3LaXmxuOnGOb&eC1((RM0mFT#w8nMrB^)!<)(R>`Cl>n0?)rpu-L$%QEQS)cz`leNnyGbL1bf?)qzU%>j%?tXnn4El z-SHXl{^IO&zR2pJ`%WoDAa6fAGbU+fq#AlFxy!o%tjWJ8q@iF4L@5GjGGb$9gUIY3B6oGS1M_KA#_(SD765 zwagnn4#!U4Og`J}S{m!Hs&DiDK^@4S>23F+#F>jp|Frp@L2Le%M%13kg_?(XCwgTy zi}MGj#Ih6*TZ*~-PDB{}gjVL?hu>wPCoR!IsbcQ*(lgMR@>Asb5+7oS;r0v5%W08{ zSDX#EY2eYF@MeMOi)qyUxfyV!FQDd?JCc&=E3Fg%0uq$|Om@#M9->7TI*=-6Hl#dD z{1@I&Q(i!(!F2EK!&ram7aAfs`sQbit81xXFW!7=0Hx$8)E}tucOrm{x@~;NJzNx8 zH~s4W5I-E3Tu@wlfqJua-aK#b`(52lN<2aAtI+Fr^nT-Qb*VyAyRMhqqng{|9MY6@ z+5}sRnsStEq`s-@rI})b@MAFf%lAeRnVxgZ3UAtgo|Og

    pnvi>il z&!gh5w<$fN{ZxQ(>TqdG>cBJ_=-ubWhFg2B(9HB$c&w%gH`2qMjJO=NYaaI#{PYPuZbnA)}{S!u$N{@ z647>539jJPpw-$N)D7)a7>^!7Hy*3fbto`@0-dL+-rAa5QT@$2E>-qROSzkmeiPs} zh%QgB!HeI*V1_NxAju<@^NlIh^jaT(1`M69X3y?)$s1{Z2E}pBr@4iQ zCc$;SQ#Z!#i7~6SCb6>|y7LhJ7hnM8hgv!f4WPb#v$A`asz+UdM}cjw)Mbqv_w=~V zlZQ^_l#o%XyHuIqU;>$HL~^?mZAR8SSYDC=z^j^zE5x1xjS(xG&?J8CI53rl-XDpU z;aX0!i}GmzI@7ngP~i7a9uJX%pLN9)Lu2XN{!nbD*L>H{xwxC9oDXI;knEqpzJKfy z+PiEk#P$j9)tzmPDo33VpuM=&`PARB6+cAMb7(0AxRRRu=x`>?LBehRFTjrpQE|l} zQ%kXk$;p~W>wFdIQk~At<}2~Md!R=`D0q!_=|JO?57~q)yOriy<;P&hY-+M)dw`ZK zVDsFqLwXX3~{7lM}01GtiPi<*wdH2$1*@IVSu z*!yHL0O+_&`rfz%JgXDJ^iK*%E>j8$|82L2Eemo13z^~6KPoMz0~gBf`}b>{Ke%2F*r5`a0;A z<}3O&pe5f2g_)<7yT9}UW0ZxM)i0x_2AcRnTKc#sv;-w2e$w-M0^&EQLmgmM54oAD zN-qbHk5u84xom7;{dk@8vvQKAx~R_gP;Icf zsYyP@a}<|(kXUtBB4_`Nr1J0@02F>0>35A~=*r|&!g!u&vG2?pgW8G}A4shOWU8uX z7w;dP_fCuJyTas29auRGxyuB~?dCU@=eL2Eq#vALjM!8{Nsp-%FSiUQw2l!zDZv-Y zLLGmG56EYVezT)imD8>;ImS-)=-6;xAXHm113nQH9^MFhZ!p2u7u5XGY3*e=4xFU1 zX-D;kGti|=aX?`xl5^ONfUAb)!*^d;?f6nEd}qpmjt4&y*#LZ zUtQ+GDDC0MzD3D)L%e-dkg0`nPE(h!r20>7NdG6`XYQeNQ-CzQX$z(Tto?Dc2paXBSX*m9YVTh$Cx5M7O9C+Gg`yYrO%G z=89*mRV@N5)}u8V#%kBuAV;DXe((8HFkUk3 z-;~w3Mq9d#m;&n~6%hl53Vd|$z%WppZkFlR7|K!v(`t93cZBGeFkvZZsGm>|?~p__}%Fhxo?2-tZtbW+_Cwf4kH z)ErMDPH4O@h3qm%#zsdaB8L%I%7|9y&00TX{D_|eS$y=F`N}N!y6cZEN@vYbv#$SH zoQ`yrEIgzdw1mL^D35=!kBYr)kmtds82cV*{W%xjypv4yRZRb+JeAynqdI!)f^n;` zu}&eL!2gz4^pA{+ZkdK_mQb+y;H(Y6u8j03*hEqZ!Hd%~r=I&;v(OFD&;P=HOxv|Y zRv^BG*q2)2^P{bU!8gIf{oUXrDO^BXp=yeaN|SMkbv;|EU>D$E*|J?7fYLftyQE)Xz}yEw_+%pE|eq~k*+e~6ID<9jl#BJ>n>O* zwJRiQpc6>cYO&1|l*Px(w}dw#C+8jThF7k>u_%h*uE_b*P(>snD69(fzmCu=g91xS z;+{lWY=Afax(SRSIq6hRThiq+8=&MMCL+{hoj=HI^08Q3yP)rEY#jC z9)nV)_f*0C#-aS2vaEsLcYM(0a)U><9el%2DHgtJouJGvIejx}Yq%zMu6?ky?VV3!}PG3kL z8H9L_B))q<@ACWRsA;gSXUmW14>3K#Yp8_q8e7|-cURtIXy7vqloN~jof7VNNoD9ld3!K@&{DjNfQG; zDzKZO_009lZ2=@1dFVAaK)e3JCnQC6M17I=Q&{qwQyhef}<; zA9k&xAAr%=Q2V(Y&af2*5%ww2>V}d;CV&vx}MRuXP>1`omp-QWT4X+vr7BJY?h2?e&xFH)?`iShdF%5SGLp znpUfNs^Db{%(~EX1|a=l+4p=!t6d(f@J%M;{Cb1v{~#elfbYBSG!hgvBWd)kr(}M4 zugKU%pME310?uWJ+ROH;-A&vm__=PJ#Gw+~j3H^iE&R^ptdn*YHAnd_$IfP1B=L{l zK{?AzL(_L#E~iCY0ztYmuLFM+5lL%zITnFnLa0s2r2Vnbh{xL8ao&0kvn1nfk#Fzw z09(ak!|9%K*O>PF6zbUKIhB&_!6A97O!_!1=~%KAHX%SiZ{KO|mJ{1NJ2!WsjXy%~ zRqQht6&}6D^e^kqN8Og}Co(?}KY(;{-Yj(K^Fh;RTwFlpBprKydAwDz6gvLn_!5!V ztqrkm|F|Lwn`U#}_JThwt@A0}+kST1Zc6_pr|XX7EDgy5fC`B){#jBMEu? 
zOpB&6TWFgkI*I=C!#nG2wUC#<_yS$!toM*Ezl`3W_GX*dV5_%K1hU&hMX_$@^lk8e z10{#_*(W1qM5KWgxbZ|*2hr`pH$h#Qo-}vNjKvbrRsB%)pgt=u*x}hgaL9Ov8%8nN zskKc(V}>_N^*lrZg<8-*04m;ZNJ>IUvtw>C{}4>8)vGsxFaW%(Eg^|-j$sP4$1gm= zKyb=#s-W#z==O0;O18z{?OeB@%)7xt4=QEPLQWYab=+B;AyQKZf%u6UKW?dDllo01 z7^m(+(*`?`#l+h1x7-uU?T0b8!3nu^7qFvH+>Ma@%8i1z}Ui zCXFQ*QuT#iKscN}Y}&6pFeYv+B9;a&_?iyOOMGqJicf6rXf$}ITA=v>VB_9qKfk~Q zSPB0jqjM``oJ^>B7pF1LVQ@v(3WKvQ8p7uNdkp47aP)G$Rz6v+ci(FO+S|4C6zszV}>eE`|h@|_X~hM;OlDy4}52vwp+){=UdaVdd zhr3c!ZOw2uoY3@#J`1s}2S5Z`6u;0>*@fG-S!h&*uCIQc_od>J3`c17sww|=$~pg2 zZ`=bY>|tugq)Z&!m#L%z2~%(Fl3UBw35G{DJ6wW7pc%7EyPmFZ%f#eA0YVmd zp8TGNQJX_6w#>Z9gB)Dm|AWE-nvs^~;;L7I#3mP3o*o-T?||fGaKhBJt;|cd^O55C zojU8v>5A5$13@-W)jfQ}M7P&hm6)*`#2?WPYy*);#36f3mzJ+DXLZM3cunJ)gPZ6R=p5oS zTUD)h4SqY(ZeoRp49BU$3=zr1!Og~lzOalsS8N-GfcXlFB>b>iTK}Ju9ni!Yo5DKU zcmH-<2I!8B7oBg%VB5qTAg8#}zLo#I>kKRyxClUcg*MT|-{V;Z;wYo!q9xk6PRycHX1ROk5bvco7N#`15A9RC+QAFM} zKd)q0+g14UuE8q^TSSm4^PCq6v+Bj62ShRdJ-BOrZm%9P`jx%Lml|&f57teY$S|C^ ztYjog<{$i(_xG&IHKlgLRw0DJ*xXVH#3C@I=ECAr{QK;wm;NVr2{@OylX;{PNaJ!2 z<}^3sW;bo^Q|qzk^b~J^KQ4V4EqFHxB3hl4J(YP=!8teZan2*@0PZn>TRAtgsZ7V) zNeuLF$yR8Rr+S}9r5b`wpnWQ>o*d~A_0R6H>`w$62FGd$TEAJ|I+BJlbBj*1DZkJL zV!Vc2CIB$ao0{M3qP@k^44gc?u{$p2GOPe%u%!Y53Leu;lCb$&7&BAybi|n?kFR1 zhcK&;ZwZ+bM{j^oM(W`KZcFC}VQ;aBy4jY55tS3o1`7~r+oxl1=XdjtSQ|u$ghdM0 z#swAj>mQrY>j#<(_YIIB*+xOaz|D=|7?4c)T{-|i+cq;O917S~azYwWS*-NuW**}K zp2KSgiFz$y;49o^bSmBkskysuopD@fnys!I#|J%Pes|p$iS&T^=Y7Z^Y09Qkt^y?> zWXpHoHDHr^=ifk>>JA>gGJ$=mf;MW+ zhl?&w0eQQ`6MIYYonm6*v$4_@zytHbAxoK4VaF;AINPSW3*8)+6i51mSVyJoA6fF< zH&{_2pQ8~uSW7<*p)Jl$o^f_|qyoaO1v_*-343D2*>N~z208E9EqU6TV?Ia>8#Mx% z?_Qf%nI`IquwV){bMHkRUcI1H~h{H zLLYNir7TRJ@N!d0!|+#Xj#$0^_^4e)uy3ng;<+&Wj}R0dV`!AT5XWZx_T#s{R1oHT=&v=&HfPIIrVz;wDHyE5GpgYEVR(19e~Y0Uep@LguGQT^LOM zvR`_wI7y?CTAZe=0mi};ga@kPlw8-(tp5h9RtOo>{nU6V;oQU%MYopkJ0TXHp0`?v zA3l;;pg8(jEy1#BY7eA%aOWY=MDx#{FSFXk`Rg@{+0_h@;8G+TDk5+i(Y+ms81-^4Ff3X5P zcTGL}i+HQ>4)|okQ5Rp{&hk-~eWSr3tfs9F%Lgi4$JYB8Xu5e%q)CY_XA!Abgqc=_ z@tJ$&biLMHgMP|BQjA;kaQeofE)B%**QI`mbxA}WP97BMdV7k?^^Ehj-l4ND2GLGP z$)ZuX^yK}p5T>_|lp$bxKk)+I!CT8G8nTvk~zVG?6-n7?C;=S|w zu>V04#ZfjI1k9r|$pI6cP}1MxX0M~Woy3zEAHff zKyeu);(9MephNh(`k=O6e&qU%Qi{Tq6VaAOxijRO%+90+iJIO>+66-viL-Ab+61Bg zvyTJ$OM~QJCVvKo;$YwSX!1$qSG<;0*GDHvn zXK$84X(*yE`IWoVces~i7}#j`{#?SNm)G7Gm|3Zr4jO0%Btk~~e|?}0Sxld@4BzM!aaE6F}BV{4E9k5%FvT;uqpa$LAHW#05ha%_^g9KG*?e5qR zLhESpk&}Mgl?46XY1h0XX&5#5*w1dkxqO-91 z??R?+{9=nU_n1dF#u;G`af5$KqJDfz44pbD25DzNB{7Dd@zPOIpXT!F(j}akHq(H% zYp`$V?n~}Ilg93pzzg{Zq$*^Z{r+q(-#+%QZCQn53dfRxD`y&*-n`tx}kMbRrK2 zxgs47_`R5NXyfzGu|N>cKIk2@G9&QQPo?Lg=QP?}RLb-b@Ls-4Ay1!7nrsnlpFjHW zaM;D(4`oHs2&ALh#C^4>Rihnnr|QC6r_N&QA(N#^CkloZ(QmY3NI+}J9$ZBkOCk8l zy%gyQcy(}+?CgC!ra+*QD^9IFPMw>=LTAA?TG!e?eQ{LRIdY!&{!42ACH_=z?LAmg z)s?jyw@gmROyl>@!f<_Zcxe=ZNM}J2I%*65m@$^+@R7tgZ)6Il zZ8ttCsOtcwI;oBgmaG!><8tVq)}IVnKd3;&w&`7>?%epS_2L+MfU<&x$nm2LBYd$-a}F5*zxw&*^;-q0Pg)9=R#*yw`y z&j?JB5e8>nEvos{&5UD+EU11-8dgUZe^xaaBcZapl-UCj2VU#LVUzQ?(hOFH{QJqE zRF!$?$@K)^*K|&6x+qy6om3zH6XZ>!Dl{L=N2O#W2N;P+v(uW0jOj%7d``JQ=y)#R zo0c5VDSn`=Y34#@-YmQ5q2>>gd2UpT!ghYlTK}5={9PG_5B0v87}5}X%qN=N7|Z?p zL}(q%#OkARWnohS@T6A=&ynFvSaT*gS>y_I_Q2uB>_t(@LYBh-^8q=c;jOGl7*Inz zDNb7S`@*=iZiM4C<~Opj7J7OO$|xMv9Of}(1kro8(Hjb`7y*l?!Fl6Ms%=?|d(&+< z?$>CATmOhY$ilVaLqvI=KL@5twZVWlXff2k!R1@i>X1G?^X<0pC($J6;%ke@+cfDT z6u#p0yK6>41rKGcc0VQe0t0O0XKbYUR4C4d>< zub_R%i2acVON5?0FwC@_PtPq<`@ET51j9ZJ0Vhk(6Klgxm;F(k=`Mfsi?n`i3%VO0 z^v6d(#aHgjpRh!)L>u|EzEw#-tr3X0@}BQ2RYcvb>Edc-{U(#S(I_}CYP?%cX8TB; zWmF4ued^{XWva9+l8r%7_zzTPv~j)Rr!wamltK^N*T7<(bUag1ZxVU)ZB5rsQoS>f 
zRQq+sT^~lUD|pu13y|R)_mABf7&TCAHPU>mggGoe-IUD%7S=j}Pu+5&x`FQ~2=<*gwsh(AOyq?Y3)!R- z;yeGglt2zS-9bqJd#bb$x^=~>W$t6Na)EF_Yc3>q{*=aR0Y8vfA;QI`0u4=o2M-ux zsaVKR6uHU#i)nmlgk?+bf{dLD2)f&XS|wZVBJ z&q!B9v6xwysLzK}{G3f0TR(oJ{~$7t8~=RKq3hR^N~4BbJ1bvV8UDq*rP9=?sU+M3 zXmc{zj1&w*#c!ZzuH_0Eo`598xyw z^~vcbZS~nI`SL?!j#m%(V7#VlELi5qdt?0SUo56NVs+$%YC?M;n;2!;P%9xvZ_L)w z`{@)}H)#)S9E#HuGS6WS6426al6%CwVW<(g?FmtO8Qae(#P|X`gSgUbEb8-*;|<^f zZB7j3UV3_)e^4G}$^ACdi7@&}{yjy~#!>^3f+Kkv5~l7Q2WrS)^O4TD_xKlRCL5#F zc$7fH?5^AbI{a@8=}818&2+GL`ymd@hWptmkQ)%Av6;iE43tiZ8t$}<1u zDJThJWah@119|;kO3Z$gES2KFb3n>kCnDx23R}I4$)-Eyik}{Q;e@;emfhOu%nOow zB#*#;9Yx|n#d44>PR`9AE*3;*cBO4@cc7=6&ZENbRP0Q!i1l)}B45ieP`U5s&)>J` zr~qJmhG$9T5A^nXXZ8VjBM_e+IX==(81u|T-(YYSyo|u!oeTS~K+bU02!>*I8GtvlSX>apo_XjGxUZ}xQjC8$ z5s0ug(FKrLR2<}r9}PtVue84i=g`-)LiK)D(JOC^p#)ndZexV=kK_+hAD1cmy1fPr zlv58|2hfs{4ol4!C0stl1ue7!FD7IBNV>o4yd!HO@5|I5<3eCObe8tAam7F1^cfF% zI4A|^?|Hhtoj7p)>lO%x6+7e2UXIZDL@2YaEu&Yf#c` zg0nRK$CZTlH9Vz5O3c4KZtqMpUB+NP#6=_$na*2Z_T@g z89k+vzXv;$z47x!1(eTu*_zgDIMXsCy#wlpuM!|e!@6$2yuMN#v+V}#p!xXU4k-UL zvd*)Us06WiJd*Hx1VT~t3m&bH7``-G;c!@66-G8eIBt?-aWu4?K2?uvJqBBJ$nF4UJ{9>2r5uNRm1v)e{C98 zH9bjyvl@q!YU-q&?La^jSg4ExSs86`1F4(U4HC_v za?JmO;8(grmp7Lf)k`2z@%zy*G+N|smBO|lE|aZ$&2QP+jOnq5+LyTpqbu^gVH8h! zYY`(N$D+nKV@0E_$bA-E)YrrkJda_bzTiDo82P|U{gdLL{jfOmo&Xq3SbOl5U%vD< zpB?hF-+XdPT_c9Qc(2E`kNf&MAsvJ7^Mukyb!2yCU1wgg-)wENlrYxzdJa^Icjg8z zHg2iQSUelMYhl9v(YD{BZqF*r5tfU|MaQ%CJ}mTQ*W zK@GRwTk((%k_w-|Kzb9EWW%lEi|oxeJQD!k+&oQ0F}ANyw< z7O>*WMlmS$3$Q%T1?wdXCYyq@NtOqnnuU@7`aa|EwJ!X49xOkd?+Y~~JD`*_Sq-T2 z4?6D1QgwhCpYPHg#&Z8vsuJd(_-hm@1E+?*5|Ac6{%FU4(CCww*@q~5P~E>3(uYk3 z4eUR4J4F!N4t{Meqp}i}()E$Tv;4HN4!+X){hXCw+@)YDWtc<%E(@L6hksd@HKKY$ zo%@gVnKTYQ(HzC>jpES9K>{dERV<%i01v zq3X-tzDm2n%7Ntyr#Yus8}nUFShU*cEKPzT3$mIr6%wLH-%I0MQmj%*2V^>X{vn;g z@0X!@3jB2Yp|xJO!=TZOK&)5 zc7wt!sZN)BxtKqda$YurcPe#P+M~iwW%02c_1@Y60rd9=)@GWGSdr#t1d{eVV($Ja z2;~v|BQGFRs-({9s<>731{nY*nL?$y>uW7+(?Tk?T*NB5^JIXkg$qYgZb1F&z(nm8 z6n1UNFaQt9d-gg;_SADJWyYg+Ula?8=TAJ6t_64nMVx$U=;!X^yE17jh_}WycfmV# z|9YQlsrL2uUr{}FyTZ1c)CO4IDM+8TY<<|gOd$%(*6JQjrk~0C8Ss&Enz_YjXK%|i z%z!;X*u=l%CeTry*=t?Mb$VCNQ=`RPo{#b7Li$@6&44j_g{WMEqX@Ka?pQKx`5Z23 z8}3BmaozE0j%FChpXHE41an{Ujb#@^!fSw^nz6Rx#KORALnd(A<=*D0USPJ<0Pq5y zE!rkGwlhTqr=8s+xrgvrKdY@D`^j@Xq=dvkuz69q%%YPkjc8per<1&*M>bFE`qCV?*JEhuWCch6L(NOM+ zC(Iap#DjKw?CNE*4t!7e-3;Q??FEj{!E2#*1+#TVNbF?us!a}8tF?beAAt|*^YfXL zy~J}>qy4U&Y<2=^Ct(VqRy%g%0(yJ0cZG6am} zeHhJgMp>>j-h}A1E~J)hi_8dSuuO3%!xE$liz%xbN}4STx2K5g9U{L_TdQowdB%6DeVw|2BKk z+JbhJb%9{5`tenN$&V&#o1K3uZ?>_B6Jb&PAd&;HMilk!{$2$cHd;E@^xn87aL1Ya zR{Xqn2Rixg%$N7>9Kk5schB27(;Jttsqy+=-bhB?V;P2*j7(Eeovw^-W&8@2@2|Bh z&2X*6k=+@vhhj&@I2qhde)pu;-#fKdsdCYFU+H4wa2LjDrI{ccQoFEP4?Z1=^mG6D zSt}?Px>UqFB+gDTK(?4<@)s#7Ovjl3UjO@Ftwv<&!}{qjDFZDKY#qG3d^=CH zEu+5({YuOQ_nF#7PBv#+$|9JP^7uSR5NODArUq{2M%3L0orDt0-wYUw+^&K`2fouH z{MY*&ovlONN$PV4GbR$+>3TMUNkIPOkDx1m?ZgmBG^lfviDNJCaV_QJ3$Jf@DP?_P@c@a|7}W>BI6Wc*86w9=Rt+*=I?w}S!ZHkPdWmrM&&%c8Mye*Y z(n8#ygb5%nkosf^E|%1)X=a3)p_Z@j)0vHe5UQx49Y39GK|(1xkR#H31UlOV4WC>k z^@b<%v*=AnCl7`v&v*2#UD1q511Lv@e-!E#LWDgEa zITbNGx`-cE)}_8A5C5|IAGm()&hCyqfEy2ZgA6>z_ic3#l6(RxoWH?o8B2KOZTDF!Dl*Ew3 zPpktVkmAm>7WTT3!xU_8vP?wVo1kTQ=k`D2OzV#~!y*g>Yq)cM$WVJly{b$o?i;&MP7O#%KF*H8t zf6x5ahaB*}sh4%wAajd7lRcrojwSFGHR}uZ^S%8XLZ1`^bmpeIUQqUKCpA-EbN#3j;@m<#!j0xrrd=kSM$s zBzhbM4dG<(^LN5_khT0y$C?kjbBHV}RU&vHu$cj&ZJ))c$$jPJH)TutIp9N5AI9il z88fkPq{JQ-m6!W0qV)Uc({F5(?R|fN;pCt}%Vmc1tumPWt{Qp8Q0La^m5Lkj`FJ|g zK9pAZXhnTvtl$~Ut*Oi4$uwn#a}GgKFks5|JZ~lDB>h&5_M*rCiwC{WtaS$hAK$;pL-xf*V)_5eM7C${u2dd21+ppgC$v- 
zRMIx?o6a@hFtE57?A7SdeAiT}=2Co$&o~Az0QLi=qwBsH^+9wS6lngwnBxamQZPo_ zWP^Ny0)wJwHQ(hpFQ3qyY~~&iPa|;a#X`0Jf-Hv@R;bEqRLvK$-wc`Er5?*nRL=r# z;`SUxy5&LRs8EXBSXDsV)DiAGr3?ndkfl# z2g;7y*AKBl066i$!Q0}E3WOWCz7Ne_Opj{(1+LB~R`eFkUpwBX##OhapL#SQuAv#- z)~6_3GfcFHjNCkC0as$x@}557FxdbJA@ec{MO4@ey_x6D_=;PoM;swS(KsT{XrYHT zIaDY-H+lhmAEFufqlFyM>RQ9Bn`?}dAa zKDicmQw^4h`)P=5r6VqEv|Ggr2s0VlO0+?X6j7O>m{xDir=FGlpHBRB_pXfG+5*cL z)sFWU#Q|Ti@4UsKM>E-k6$;(g0(W`%Lz;5LvpiCH#XHCj*XgylnG3npabjb(${{G6 zHF@Dt0FTNWfD~!Gr{YGcq*3d}RxIi>O=qiAgksw2h`d~%PP`isLBjpc8d%2O$|7JO zo}%E%U^>g`hYq%NnAHW_wefwve;W5LJ=wCJldOs2uFnpSOFkc_lxHrp+kd`2PI9Ho5lmJ3t~JijVs)Q-B}Csm>PmKJs7_dp zmLSzFYfR>CYCWhbxRq&UJae=s{hope`8mP`Ckri!h6JhB$%bphOad*?I8&>?&`I%d zgFmD4+D3Ei_w&^w%7i&zK__YbpCm_5C|kFL2N$PVWeJau)nUCbvU# zRc8j30>+n5RZj@4IH$|0#7UUIvxNfCRsWkG$ySj0G^&HK0( z@$oE>KsluHC5i5KPx3`19~8xgN0C*=g#s=Gi}Yu?C(M;$&-fF$tDh-&RnmUV{0uGs zK)oH$x=|f=AbJhD6cQC@O+(!p{cEZ_aTaUwI7#m#qeX2M#PV01$Qhn3J?Z1mOh&&B z_^xwo&8z%k1tX`sl3~W!A=j0_(*10&qq6r){5H%3*9TY4ol|TY!mCr_(##%h%0>7$wcd+^yJcCS<+2rP9g^h$g1s}$G^_?(6G9HaH-9p2z zogWk9h#?esPZ6}eV)ECbh$uhhpbDp=g7wQ1S7K@RBlgU9!JdR)*fMRI2bcdr(?~p4 zJ?rqMjyaJ<1tDVVa3)+pzm~*K`&AIQ+5{Pd^eZyo%6{>@>6{cC^N#@rq~>tMj;CW7 z201{i!!=q$`%A^OK_1}(qcH(s2&%;?9p{^bB@MS$`EMKv+7C#5oZZQb1`zd^auWA1 z=(GBD+df-)WJRec)7|p52bVaE8XLXOIU?l0gI|EJkV7UXo4DZ1> z8W)%-MHYe3G{)3HXbu0nRHe-!y;q;*8xU|3Y(i*3^DXB;@k#JSW^2d1g49CU4Ck4g z_&786N@W(vp`?LPz#6(z#x9ti=f7WG%%K+5qSZ-9qT-=ON4ck?r7cut%$l&D))nZy z=Kah&nrqM+__iM06tm(J08%i{sIStL!YJD>dSf5qH=W@ly)Co`fT7aLX&q`|DF587 z%drAx=hs>oC}OtLkq}&rRnf>c6}2ox`U-*=LwQthQCyb&=i+bXWb^1Hn_f0!G~ugt zAe+FjzxIn$D3-&glhqa7VLP}Ec}xuWm!otLN#6=^X6kjsb~|$Dz=&3nMY+HxdUK8< z?$bU?>F7Y@#<21=laaoF>~%0pYYY!8<9&gPN}ow)f`}bF#AS{vGG@+>9428=ME3MG zEYtL74t()6g(xj71M5D6Qb(bF8X#Xu&FhhnuOPUeSZI#0b_VBuF%MGM7FboCC6=~% z=z*gFy0mx!0+t5Qz0TW;R!OfaEHt@0h60Fe@-&J{6L7)3@<7>zOylSI0TWLYH{C?+ zMTBme_ZIM**!wxSX&I?P>B66({MP=n(m5}LU$>z*WFS%1OVm_6zVmP31(ruXeS^m` zu#u6E`~B!1198a*7ac3U1+PhM@}KIi zsvt0~yLu5?qw*j$tmH~ooH$I#DtmOM7veV~VzZYwO^q>9OJ4UkG27nrkybFRmvptC zCVWD_CMTUtaXDDgmyu*3dt;NJ`?Z@u9i2tVOSJbCenUl$4;ELnMiuqu-9pxY{!d(T zb*k0X%T2gq85ivxlj)EG26_Je5?e?)b3cFUiLloz=qY^-Geq8Sd6Gl}Od{pkRehu! 
z`HiA~GC~GM<5Qfopi!U9dU{NVx}>&zO#vHTvFnJR3|sSgHkr+z?$F}@Z5jhNWVwo| zLSm7V&&=4dY*4YSVRT+l|MJl|)MFQHbks7+feg#RbPgO9X@V)bV%E1Eo!oZATJdyY!XVoc}|dJW^J@X8n?~HsmPywaMOSF@Fw>8d7%l>8QsaSpPc2xDA^cKQ;DW zHLmB;>-D><58^D$3!hx}^81wvw)}j?6>5ubRASO6;eW3{Cybx{rrSTF*70L;3MW8* zGy-+biGqERapjf|-)@?0bI(%jBZ9V5jREj&4`o`t4jwi7k@(mz{!+dZvztqjJ40pc z&ZAopEvYYW@^_jSlW_7^^B(~F(1T9(m4-z2udP3M!l-i>-vKYX22H+~zVyT6*_Zg- zlcUVosNum?SHoA-He!QbV$k1!@2zQ$p#gd} z@zgl_4Gi9JJW1MAgl9V>l2OG zLrR@kAW+ELUt!f-_ZM*N6E|@9P_CD4y)-(r2V_?!sGQRiZz@W;HuyVPt6u?of$6Cy zZ$kZWd3VaiX@oRoJ`ycCzmOs_5y#$4dS!5>i0>ooHx=b*!oVtp+qg#zjcmn~-mNr9 z4<9s|Vp%4=f^3qyEqi1&+T@=)d?-$uHTREoNC!2i*EVnQUpoz(i(({!C4L2C5nJ;( z@q`%Ol$W#rgGwOlWE&45V`b-nIEb zK|Gku08pItf6Wl&OJ|2iFxU`j^Z;HYGx&09<0l*}rp`84h?fXU2^>=&WaExGmecs+ zVzU#uA^lH7v^%W3VB&&dUj$_hS-WW?A?P*9qifv(hyr1=m)33I z_bOu^aon*}B)x9B^Ds8M0fQrmfu)tn(g-HHLp03xQDA4+pFw?OIUT8CpD>fRH5$5Z zk~x_vw@d845ykDo>~-S+{VV3$scjq~i}$h~VY%ZVp1J zBwzhuk;h^d%mV0Dl9|5^JDz~;otI@}v*A`ri~w&bQ2)6H0&3;64x5(D7 zl2)y8D%bXYFp)C{tuDI>eyfGO$wcTUyL?hoCzG|Qb~HueNU!^lZPhIn@@rf?wR*3i z$Wg>Ls)NpQ6#)S0_i6il18_q;qz?>0BWZo?30>roxx!Rmy)rBT`E`mSW-&f#$2l7y2an zs3;M7G!(#kXt8K0ns)cxWKXcemX~+Gd$Qnvk&T}2XrXcPSKuCqlj!NY{ng)i~Gy_D0}#d6OQIS-8UnYBDx z9FzGUxOZUpK|i4(KG7n+U^?hOAwho_pvVk0Y=W7kE4As%jgv=3KUIG`81_HxsOVtT zr89(MTUibI=dwpV8-HQ!AF)L*uYHMy?7ZJ!OR7r{-Po`_I zj}BJ#;x?@c10Q3fYY-nT@T#ygTO|xEFkI|mXxZs%m;l5O(~5G-h0=QLu&%JnCU{mQ z*na7#cVTOQf>%H^GZ4qIUcp5ED%9}nT5(yZ=lsXIStRgwBRpfwB#t8Jb4qfzcx(hM z#M+=F@0Iv0W+kfc739`YJX{p694vOQo|r33QaAPK`MpC1^y^X?29|tX*+c^jBZTX$ zDD!)yZK{`=dYc_@QwAxIZ^_xSw`Q%?@xj<67OeR)I|=3SAduV$~Q z)awo#zby44nQwqtVplgYRoX~p`}P%&TX_KTW+a*u&@Ia=87u9uk45*f>HQBjtO7wF z#}KMHabSp^(vHqL{n)`J;X9BObJwpeyRowT>r|@$`PryRNDrh*;C`C%Q-hp6ou?5ZbVkz`M6_z6;_e~di z?w=oE$>!a?Y0wpr^bdv)iu>}!{ve*55A%6F`540`$0CYJ)i)G6pd((2j}Py(3L`iz zjeM@L#bD|je>A=JS^}L%qpMPnO6=g#sf2@<#wWCj=kxws8Q4AyeMc2z?byPk_g6jQ zuCA{~pl5ns9j+76>9wH_CoJ0w&`ZLVlEDcGEBt7rPc-GkKifOWb_5OEJA5baeuoxx0_O0`SmozEsDsQDAACt zR^iCJhh-|oj~x;?jmFBXcM?jC z2c5(pSB%OqAm#+Tebkm$vy6-@P%Ji`9YAm+{Mdzui%tCvkhwlLrjq=ufdXs?C_Ipc z=%0Q;LcA&0g3wM~veMh(2LK0LxQ~W{*U}9SZ>1XN{nR4T5zTYmJ`PmTo1WI!WBd=} zO+BF`+diqy?uv!F5zt)b6NQ$++=jtV<4B88MM;i0;$|R+!6WPRmRKnxr3=5jDp7U7G)zF}kX` zkY6z$>W5S`D;qTW8UIS}07F-PTr@P}K-LwOdrNn^J|@3O;e`d`a5~ zjsU;|sQ#Am#k`coH9q#C-${WIG$*j!3joh{ohwB0k?r$b zi01KwZoa3WSV*|K>w^JQ@T(>m@AbO^t*1l8D*!HNte$yP zZ`V_=#iFE^8cQ*rP@wh@OR|lV6ZC!l3`X#(%%cKr2g%?iR6-nGJXr=J_h`MgG11nw z%z%oaykaoasg*=1V2zjSdktHqV56=^go;T8OqA$oJbx@8rlSg(fQTl6QEpSr*FTHJ zm$L`A1wdXNkru^RmlkQkIzQ3EK&AL-U4-nD7_%Jj;PQ@yQg(I4N3lTYtIugdKa*JcofN^o+Or9jvuL7&f1K zyQ6re57>C00{}hE#tS2~60h~Q>0Ix_2g}kVkv4F9%UCNqKgw%{2!E~C{Cpy6XO^gc z8)0GYS>fmc>%XYI@8Q<$EW5Xq)$Rp-0s7_Vm?4FC3z>b9p0D#uM4Yi{{h#?@ju&5y z3ei^ed&nNEI#NhS#5#vxI4g9z!0M?2o%3|Y+4JHOG9L~}Tc`RDn-y$S`4 zobZ_VW6Ta6wFn0>^_J~EuakOpY@Zmg@v#Cx1e*;-7xeWp^+yfzvIdP$2jO7Zz; zGCw7CweD3t!u9`F^Yv`Uweo{tzIKOACR;-)3|?2IZj_yh6O1_pxwP}UId-=AeNfhC zTIAd=kRAu3d58PGkoXFBO4()0m87BT5BMDlR@*1Bs4Y3UdfIa74Q-%7PXA`*O#i6)9iGm(*khhK|#C zD7^)Yo#KsAIi0N9(yK9m%|5%gEDbf@KfLmm?!Ogg*A zSXSVmQbhux9O|`sVAa+k4fqZG%tspCsa`JP{lecb0pqE^wjoc=S2~@=!AQhhe*-rv z+o}1**eLp3DF~Ee#L-&+wDt`m4<4vKlph{V&QsI?NHGn8di~{$08033`aqgO2R&8r z8FW!Od(n^CDDsAnM1!l*c=;%@Gg*6`!OEBT$B;G zYj*yn8X}NcoTA3iKt$CbHWOpE1^|w#2KeMf7Q}CB|IryI%IP5wiP_;>eUIA3zr46B z7nllN)d`6v3IjWEO&^zX4NRjp+$x`B4EJR_DcR5gU{ET&de!l`=AftVtJ;wR%g`i@ zZHuwyz3y5{$+ec@-%?-kS(1mGU`~h7Ho9Xac1CmmtbkW>m52%P=<7A;_Qkq^uhFK$ z-#?^nSNiLBSGi+mKD3`@ICFX$3#gT1?k{tc$=z?jnVt|i zUeJmx-~+)>3A+$8`w#Q_2KvS8<%9!pAu8&>9h=$qX{27>pM0p zUsz$s=gj+6dlDwiqiD}+Y9@D95kp{R5e6jzmM_U@JBsRWJXVZ%{*e 
zb)|Po=9I=?bN&Zc<)2K`AAGPN!2TIS;JYqEJ6|EQ1*f6{!9|9anw{{+D&2Z|-UQ>^ zW7M&f3q{oMnhL81Bb~v!^N>xCubA; zupO{R3$k^(I-CB-ZhfG1*7#u)t)*?A0?e^V!(QrzUU>AD?8o;}b*+atJ>cLlc`7%1 z$a<)ty{=VhaCgNjSl>Lw?A}`d&-0=CT6fHFBI25N744-Zi2qFhDFSJHsf~3Ss!PIAi|eNCMllFTSBD#CatbQlg^(-KB2%{mTU6bRwH01FLV8a6kE366SnI z6&fg{V*R#z$-~F@SSv5sqKXvGEd%yws{xrUqIFBoaN&+ZXG=jZm~IKyX&dh#aQo#$ z#TOo>I`0F~pkAr0qhh@w%J=JBnYJgRfM6&cS@QnEXIECiJnO3@*_lMc{@ECCZb`U* z8j>`;AZ%bEtNgC{h6jVv1_bDx37mium1{N}vR}ph*5VeQGA&tDXDt^>zVs+-X;AI8 z7x(e|hcm$iH$FyGU&PRo)%I0k5gQ@hu=DgoBWbi&lDv|>dk&-#LZG9XD82QAd@eAJ z7uM5#?yKoOEPl8#(g@@Ra)Pk$*e;tJv#h@iq{P%yJqHojTbTkAUgfm z${3V@91wKg{q)0ZCIPKk6$Fva9Iqn(HDki9%2R^BQ4!u-jQt20F-eO&9tJVYu4v?dGi}rLbjIlZz?Mz_3`Y7=>~w?+&6$CKy#>r}JPb86p|IoeBW7!Eph)y9NoRngoCl?27x;kqk7v4b*OqT%g(= zxu93YW4q-G+)~ppnGV_~)loVSBooHgh@8o19o&4BaaYJy81K&6UKTKIu(&cv-|w>s zNF=2#Jd}ZiV*aB^mpm0Tx;QAi0k!`F~1; zJj_>xxI#Cla)PJ7F%`KdvvzG4WE|hEA{30q^^l1i& z;uE80q$YJmM$jK1>HW1N%L8 z_p_+-QSEM8Koyq8Mbgd3m-mvyt+oa>&PA$1QTGBnMDE7PT_#)~oLQnxnlXxJI)8ck zf^Z{a{*Css!`2eOeC&8zqX|AaW5(`5_{%Pa1Cl@@SY+)lV0n)WLxFT^gh$56e!?wt$h#TE`pEp(WlTmPuZBc ze2bsn?(D}<85sz5PrWb9qv)F!98}at_L?Jjey~Uw)!@^Nx->NFaOWwGxfDtb2saEpV*2|Eem1SvE8r17VUTb*kngY_iTak{bTj zn~_(HPnRv@xPghT9$`<$Iz)U8pXAuTQuFar!xw`gDv)7e->m=YOH&SAth&4r(cOP2 z07lLOCr274GL4{^@ekQMdP48`%MEzR>DFXfQYw#KE$1gyn6HKJ zX&?|0PBw6qt*ihuzTNGJNfp|9&yh3*L#Y%buxcTR0nzZ z`sc9ys$0Sh=ryJs9XTlR{YuG0pB+}U_x}BS<)kBv#)O>)Z-@M7it3gVa{~f;?ilt_ za*}(*?>Xj4(Ut^AC-#tS2}m7*=kZNPzg{ePf>mG)6-#*qh>j9}X7v|U(!wVek4L(0 z)vNuPiwA1)WR>Wx5b4-WOO)ybFDCUsIwFU`Kebx>TMuF8L4$ynp}>X=gSYn8+&v#g zx&v!w-P$9u)JbSj8Q(cD>Vf5vA`&=1Qp6q~TwcKTa%#F}oXcR0qK&gc7>v%Vet|We z!5nywV-8>B4!q5voX%1+WIXtPiq6BI>i>`8pX*+(am^69xZInaTx5@n%=m-I zQeA}Ty6&}-(3MTf%7}{WEvaNBGlhhbsNe7X{R=#-{?CJdZ8)h2DzLzQr@g z)T58w&^Nz>%Z6Bqp+w4!8v!a-E$|LmV-@gQ5>z)LcTrTVx&-xQH%#l>zV`Sc6=C#i z;?(#+9&P+=fi2fCnwqsHlP3@|4C*w4t51&Hx(7#uTUQgcbQ!mD2DxEAid;HUf0tUX zcE2#7RwXh=W>dVbpMt`iU?Q^K_b6e3h4%$VtsnB6+IPkS?uM z+l&r5)t{9ZU$st3kw#I`q9wf!8#acC9or`2V3*|lN_teHN=Sk96TGQM{n!5eGIj=G zPnB<0g`kv2r*8?wYGE2bZYL@4ROSU`C$XqWzZvqXH5hkm_7_B2clFm{bv*1AJ9jyB_~S zS_Bvz9Fqj!JM^O}Fh{oX1}8!>+x_NK(9c`@4lCO8+GQ7u;<#r2*$hY%K)~ybk05Z+ zt*||F75|hY6T)Ws-u!Fs1DdEj8b;IwO{rSIN zE|tBs`rt=nscm+?^Nw+r%Z>e4x*}^H8$rxv<$#aI%=>FiPqZ(Nt;%C8?^UMKC&e}SEaWg^^@y2b){Sfu#zMPUT+ieJm{ zqTasE8@l_<$@cH|aa=E`&4XkJ6i1{U$+AbGihr0b8!{@lsy80w`@@V?M?7s}Eg`-v z1Y3SDPF?Jo?+Mx_xpnEFqMq|{ z;I1!xq4f0qR2)|{|FR#AUfsvxUHqOh55gwr{SuFz%8UsR{H&sZRqvLQBYXaFTn{M1 zVSOQgrmQ}lb1?GE^MaArJlA|?c;qvu&kYU>Q!= z8k=nTS}+XatNpr~mC*S$>nHrii)>Sartwbt){Iz8tR+HO%0)(J{@3wG4X%xB z7$P&~wq4>%ca5tX4bw+u&L(fZcmehahRmkg@A3O<{&gxrJw(@vp{xkKlBYy5-A zqGz`44b)Y72BI*^`w>!@%Ot{JY%W<{9Gwr5~Vpf)c z8PLZOtg6}#J9TGMUoVw3GTlwR!_utXkDH)>5dC1l^8{<#X1I-l(GY1!KD`Bieb8i~ z1zW5`Jx>s>~&=qFy`~f$_$?uOzVqUe|q&tY3}zi zE$8wnWI1TFK0k4Y*9fhmb2+KKnU-u>BieYf8%Xav9xpID>SIt(-mP<-% z>vO`NbztNwxiWOA?&Nk$d4PntoGx2fzd9WmfLldK2E;-20jAT>Cl0nmc9#eWZ#5DL z>=*vFI+u-oUvfpRT64P`Qi7xmZaP7ph5b<69IPmtRQrrlsg;NZ{S#0Ezlofnp$WA` zN~*Z)`F~JU&@#+OC@ru$Q8QoyI4IMtw8i}#>PE9v+4--BAbJ$^dHFD=HsuV|8 zZY&Xfiv`Uf%^~EP#*e{1Nhdu8LGs1F#S27q>$EC;imG_Q^{L9xTx&eZ3)DqH)gf{f ztz|BZ2V5*??cKox(m0k#LT-%5a{8hfAm3t_HR7or;=*OqT|-Gw+hTHM{igiOS3u7VTuv`0EN3`HK{@t6abT`3a*UMsQg)sHayP6M)tjsaMrRz= zWH}Vd_j&zs1qp&-b8vSAXU>)(&gkPX7ehFKgPPm7Z4t*NX}&5`{9$6z5>6vr%1Pzp zm!Y>%(ceQC9Ot2Zh|9M#pW!Y=s)L**%iUA&r^)Kp{GZA4|8202+KW?GPujZEkpx$N^;Dp zTexxq^;_w#cP0Mgj&s@i>-Zen;XmPUXJ&A5=13uz3ElLu{;_v|&&F z?Z*a76<~EjPuzWtuwKkiUIZZ*Fq{Rj zQRt#EawJV4ahMa7hd6p)o|m+@8%KlddDsurO!cJG)^q6Ytb?^$Vz7epN2*p?2C5J2*0sP`?|5kSh|SFsUIsY|bi+pAViCCNV3k$KH^ 
zgAurF4{W#|edSBbI>!E)`W}S-@@W279af}`hG=b}LO#(G;-*4PcaQq*Jd5(+@%fP9 z?ZY1$4H*pVG)Xvfc9 zdxQH=ORW1r>%td9C&dxcVcgq7a?k4O*YfAr<>@fw!s>W_7BQt=V9 z$R46gFat=G)n{QHn(dTsO-XUCvJQmA>6d?@3s*&?F>XAWO5NA(M9pp1#S!d4j?T4$ zd{8je`TeS5=8^G&m0(>^YbdQ^*GBD5Qbi(PoF5tvKWcK}+!GP>gB+|%Ke13+RWr(2 zWvFUFKI$D~-M{@{zi9oE>#Kniq(KZyhxCNKZHvHr=5_k24y3H{=be87u8dCwvqJzL z+r9a;oVaXF1^eQxzo-1Y`9wMyi2SnNcX~(+W9DiFHrXdM*r2&Lo`)0kUSSP1pw3+Gj8=ro27>&s*)hl~nRkOO8wh(mG);A@9 z2ak9gjzS)-ZxzZOuyp>)SsEPImfU9z7K@d{hXq^M1kGOQ23=(e0s8{FP!S-pD-aXL z(>l1mVKW-k?9cO@g9e)bu76&izsodBgRoF5RP}&PH(odgGPr+ct=ZUq@Q<~I!TD(m zLI{46$XGiV@2U5iv_3V34YkH~3Bvx9oZpoWRM0;sv+Ai(x3w(!L+2*w^SCwiqK8WM z|6Q~GI^g1M8>A|$x+oj~Mcxbh74~$7$&0c%?!jttriFT~Pd)`ZXGz!HbZNCd6-Lnf zVOzUukW)M5R=pYy0$UQ^oSR&%#dXlR;$MD~TLmlSbM(OFn{r(wlfb}#;3bruc`wW> zFlZA5xEj%)FUlC;?N3_<`9SXRRvG4gbP?{M!JFwrXb!1rb?JieJq`eQ=NdB2z1kYg zGA8xNXW?C!-uOthruqcjt?lT%gTr8GCe}0Yw_T00qvxfXrY8nKhpJyqZ`)XSBvrqK zb4vnqg|*pg&1b#@-N}BCan6=?L@7ZX>>v$c zpTOo#p+GpmxpdFLorsFj{6g30lqklTIuk-m^Uqk7aY@!p34n5^*(KyFad>{M7m}U0 z&<2D+Yyr?60`Uwhg1<*4!Vxk?%$ISepc`*UT|uDsx4l6Uo+#WH(Me+*u7usoY@#eP z+j4vy1nK$qD;JX38id;lnEl*{(mj@B=x7Rt4q>u*z+^et`R8rLS@cyA0^_9;_zQ?L zE2yF`H2phKv)Xu-Z1)kgg=*FlxM_&&+z2T{^4do->#<)os9D6>EXPJ^q&Dfx%*AC1iKaZ>E`1S0IHAVJe$k5NiKQ8t7RKyC4o?i3jb6O>R0TIut(?Pi&3Qy z^U&euNp|Kz_c>F1R)k-J3A61rptxmR2DqqZ_fv)+lu3y5Bj2olbVU4<;Exl@ z1c9f5jJ1OD;tuZ`R9C{^iV}GpdaR5GPbV*^hvzFEMJQ!)+vOAy^4zH)Vr;nAAH18_ zsh-Rejj)y#`@7`1-q}A%6uogbGDDQ35ykGgP*@o^X(%ZR7bU(dCrXdi-@eENJk~!* z!m%fYz34?9`Gg*S9EB|WHORYz5-e!RhiOqM{(r+(K{FEj9_$~9DC(!9bxW-ax>usarEYyq84?veDa43o> zCI8!A)7+6Hnd4Tfb4DQzvDLW2Zjc5$#@Jm+WiaXCnN-NoWe1?um1kjTlceAk&xd0v zjwgc$aCWZpHK1%JS7l8^s^!u&Ynlh$W3Q&H?JyfOFa|ZBCVMamxVTZt&$uTUi5dR$ z(=d?zlT>kK;_p+bO1!G_DOtSeBMoUx?AeEt*<+tX!BYA_ri#-9CZi090sfwtC!F10 zz$(!CcUEvt2%mO`QcX@2Um!BLbtQ&C8b@#Hk{FG=Ts%QJ+DvJ~pS`yWg{xp1C2XtG z=qX#%XMYJt9m!5)6I~_14w$&^pzUb7CC}4k?Zjy0Ct2*X;MIxcE-Z(0oWKghl9uxzwf3 z#Y}Ss;DWY`b}*rml59gac*x|!@cAGChr0-W^i)_1_6?e;NAz#~Fj zkREqkCj!nwLcxHSh?VH1gb_+=9P{UdG$6yv51v6Lt(4|P<_y>UUS^yfHrR*{xB?^$ z_^o5Gh{#fVTZnVK0zgXQWqf_DW;N+`a_s0+HMZ7>Cr7&73FsYd=3HHLuqvC1(ElJ1 zOiBtW@g6WYe97kjg#q8LvKY(_{lXyz`PcIuwS4Jq(V$?1z}&`WJUAA1o!RDD7esA; zsSesomLj)N_<|_zcZbB+iA8b}bfHOoJ1m@^3M@TQD>W2DO^`?G5?h+RS5$iRmYVXN zD2_y+ED;L93$0lG6ymzAi&1ikb@Sgz}<|vKeL(~V!-^L>TeTbqyvnAM1Pww@!SsY5Jun%5h;~3 z6d63bF`1K%iSFb_7J+ohong_uw$3f5P!+EtEdd{X0Kd4n$NQNXPKhP$pTmuEHra?u zBEg81{9Y+u4D)JxXrN6nDImaIwyDL(n$TUv4Kp!DL_WVUAx$*F@7G?=)knoA474|> z0jKy$g;ZHtEbiu44E}q54Y!Y-H_5xP6KJmnnGf#VZdHM{;j|jHROyrXCnFs~!9=o@ zbF025?W2Ih&2u?dRHJ2mSPBQ5LdoD2J@x=nFcfwv2Ng1z zI*Q(HjtmxYmULXEX1m^SU!ixN2O+PS(UdXpyV0Pv#J;2?nuBIc$Qh!!w%5R}=>Vbbjv+f0y{EhA9uaZw*i=8m;i|z!C z)76!aTpHx-RF7GWaqO!(8Ee+c4F9(M=r{w`S#S+o;)NxOE%vGO%}bZMwRHLkU}$Ph zFBMnW%Ua(Wl}yjYG}L$ABdP<3Br~uOB{o4?z-v=$TR{xe;m1_$#18;pPOeoTqRazJ zF&a3h$OU>v1=;7p7Vb%(Pf(>ZHbJ%5ph{_R)o9-n1dlk$VIjWn@^tXl;;v zy;~T|0C=0SUOR*3>eb=vf1&lBaB4~^%rRRSycAepW5WK9q$H?T!3^&Tn6S12-zJYf zEnUT6@DFn)-|M-dMN&+()fTAa5b3UgCoxv+SJwg|ryH@2`-z)?YfP6FlI}AnVwlB} zG`^Hs5|A3?4omo2^m5)9vIb1ccw{(7*y^LS%n)$_6p zdtqB3c!e^pFwg4w9z?rDjyh9ku1DNw6T9P{a*#`uF0G$?jWal;wh8v?hYHDhzILk| z#Dagv|DeW73hY@ndU;}Mx80CZaaQDY1_AsHSR0w6c&how!z_y5Rs5Sr%hJgZsQdU8 zk;>gcLLU#dOqM?jc{7g9+I^mB)K5p`uTaA_vQB5=R;*2j?)ft7Htgtt_U<6qVSU^aqJ&=jUp@WD*= zU3%k*3C*Q3Jia2LUf`{xQbqC{X7n0kvjs~1fSvA;ZU)8cs9@5fl_Na<@26Nu)^Mzv z@$WPb-RpMw$I~lyAZ#t^>HkshDm#g2010K+aCe~Om5(m5e|S%nQFthV%~9>dV#_7( zcGQ)=WGuCGTuV3N615eS zA_KDn=Of}s@OkNl7ht5RyQVqRo2Ov3{_QvLF`Xafl5Sp`CoPu+#8ko5jp*eVH2q*N zBS^FF&(ATQj$*q}V={!x7{Q}t?yi9=D#O*HxB6H%hRt~LJEt={bA1*PW9-~8RfEj; 
zs$ylOG}a`|cloK7_j~!S|JYh0k zumJ(tu%)_zwSlgz+!9ceg34oR3Dh`DBod+y7)aO6@yuyDUUKdf4i=5>88sV=6@#p& z=>g6(kmkD*RCzuqY+q`7!+Di4_UG)Iq~}18n8azQ-X0n7Sjtk znHRSXDrn4{c3|LwR3i(lcHtC)B>K&wd4E4;=8m za0XWWqBZG;u0*$(Wmav2CC|P-bs|mmNY-+R;c^^9EfM9cmyiKk-dM9gl9vm;dWQt3 zE4#HMdHMpnTsy=BW?=RnTCupK)n~UTaaMQn3-%WcU09c-W&pLi8}8$gk>=v^rs}!w zETNuu@r_~>7-I%}X%5`(&9nrKTE__LTC&mMf2D1Kyl!jP8`Pl$$#UeggW6!iz}Jux z&|f!O_;C@r$y=7kF1)+DWBKg5`%{M0tXJZzW&9b?xml-`OKZ5JeOnH@C7mLMSEYd| zB&os)PmEL+9#iQwq*y(nPzsbDrl;Hrc+|PW^SW4G+E4j*kgXE*J6Qe8O77K`=TYZ2 zZPs%~J8~{Mb*brtTb64%W8vahU;Ah``MzX-6}pz}8Ss!KRD$Q9ZRg=N_-tMj|FXo& zC19-?Xj;csqYB)l`=0R5rFI15;8hot(?KSlYftdSu))ISbZ{+w{T_2~IY=J1DOL%| z87N$#r-FCw?Ako+$Y@mm^7iTrCanfYyE~OqGe)*WN7gnQMx2Q;K~_(q!QQ2mv#MrC z0S}I7?l?icpp)iC*YCQlR>-9Ny9q`iUs^><#Q?BzS7eu#6H`$a{SZCS1!6$>>blTE zO`BDOSdYTo=SEkxRVgLi^~HD9aEg!f&u3K#E#X*(qA5dEU+-cf=#}}y(M&XEuC5CC zAzV9$l?YH@4m#Ad82kjR8Q}x;0UT&Gs;12?lLCyduYAX8dX6j)KbQmd#pmx1=Xo59 zy>D!hmMaq6Lr=aZ$!Xsob?UMK5Uo~tYf0IPn-LIp9Q&gGnN$l?WA>v5SQ>_b+Wk%_; zBVG8`TL5*{YDe1Fx}H<_Y5sc+CUU-wDbBVDiJpSH*`<8QxQE%N8KTV^fFv&`jzbsU ze2p+)*XX~D7c-Gv9)xFw*D1zB{_crIwqJ}wz8}L6z6X5;e;$@ukaY;Uj zHMafA7)kxQM957LR%8)*kRo!20p~9u>57zx@lrK?F9Bed_{6Yo_1r>I&v)XIy7{@{ z?J?F70#MmxJ8-x&*wJ1oq-<&oZW;}ZjQ<`VT3+YP{=xqR=5aI7Le+4tHvk){WfUIR zZu?zs*!J}D3&8vLlerHp=q2U*c8QAvSlb(7Ec3U2fROb0e;HUWZPF|0O~L!x{3EI9 zI@*&i|E+f2b-J1Z3=Abm_o_q%BH)8h^E@<=c{dO;!>Tz z86fMTHpZOBELMgb_4~cQ ze?ZGVd%r*L*ZcK)J|E8@%`DGC51bpLQkrk?ESOo`{cip{Z>AjRsm9hL zOP<&uE~#H#LA}Rc>`I{We<%o_?kozae8;n{@y&o8Ee%45lZI2pY9HXkXJF3e1z`dS z`4_nJ^#3wItMNmc#Ce*LGjcNZmI*fIYMJ)VEFfQZX>405+oQz^8T}Kx`%!0`VGA3q z378VOP{3L$9P0S_$u6Y*65EL}6bYtN=Ihl?PO)_zh7ave)zLok(!x6De*o;_gOFYK z?RnmGGcs+EYj?iJtQ>9eXhT}hR`LXt`95lP_s|0Q2jr)LTtk=AKETJJ{ruC>ajaV; z=v+4XMKL8*D4X2}@Z9&enff&|%uK_ehxuDKcT7gYk3lG@6gczza~ss5Z?=`}Xafd$ zks&V_(vsyYAM-Hdbh%TOX_<&-X1JL(K&u@X9n$QGv>cvP|q?w-!=x1Nuw$8**e~A{-+M9sIQ9)$bbY> zwfcwGyELF!vG?m7USu%#%N?M5ra>15 z-E8vpfNJdBovN$z#)=Re*lz(j)Y)MJU5>KFvXJpJDbwblyZp+r~;@@?%Kw_YP2Sh&2ln<8}V3b$|10$NYdx zb9iU;)?Tzd8&P|AJArkFAyXt5@nwBWx21nO6-&l}l!65d1BdL1tV`91o0ql86rh?7 zk)8T8I~q%e!S*%5!M~A}cu>I1G?&I60M#`Jyrpyp8K|D_>Q4(%c0?-@Iv^FLLgVRuudLhAI->y|%X#fpWL{_+^i(^8-sPRUV;Z z!_J>!*f=NEf_a5nnOKmo?9}C3A#r=AtAdPkYWAMZ)X%tqt>p-~y^%!uz_b6YRp1MhN^S(m z_H&TBs(JlB*ytrhaYLf^NR%j*!Ufb5GyIaEL~U8XD*1H_U`irT%zMbpsQFpcj z?wj$wx1$Q z@Z}*`0x5Wy%z_6Ae}Fhb9ZWWe?oeT%N3nc{!X#|@G2!o$Ts#TnKEG|AmCatiXxc1) z`rjib1+&=`VE#F0@%LvR=E3G&uck~Lv2`t-Y*7DBm7URc&XsI}N>`K?)!}~0<8?}YzAlTg0>RWhJeQ2;{ z%I1;Sm;JstSK%=Ws`>-k^KUqagWCGF>1H+$z=Y$StCMXzJq84}WKg067Tcu-7IJ>v zt54+j!R#5>`p8f}%^QIZnL?}^IFy~Ny@lNM)Ydw7P7q3aQ@73#4=ehlkp*cO+H1?4A?S{0*7pHxm zvuZbDPI4E?J7CY^>L2QV_2a@OBnoeuiK-{yju@~a-Z7-lhP9Yl-TzFQK`LB2{tzwR z9ikjEEWC`lqz+vO-;ku($bjF?%4!@R<_FUE9@eSD6?{VcLA!)K}6R1r|=!N7?SIm`NZy)dh zA8Z5|40{JI^yKEacXcJtYi~-)=+`K{Jt`LiR#@K5rS8YxTTM^Lc#keH*u537i(bv6 zK~Q|EQ1~DOiBh+g1M>8;hf=K%+BE;=gczn1{YnUV*gI1k2O2|; z_m68t)C#qG7eaZ;Mh>;4_dU4zeXaXxeeO|`u%a5P!4*LrAFcOuvQ;$O_`7U~-*y9d zjq{w=QNma5L=;C?bqKaf;^5Q3i0Fl0bd^1BvWNkrena)@7livqo5H{Kk48}W(|6i$ zn7wR%8@D&*ocP%pj@j*m{(;TskREvkuAvQXjORl6X)$)|qJO@> zajwzKi5vco=h>-3u=mH+{HzI!73=oOYo??R|qE6v&zU z&>f1Au2wlY#dhQY(vXlRf*;ZOnJ4SxX>yM=H&SnYm&_rZ1`wQ#)$86_*3qBr8PC;m z!A5p7+`B*vO}ss=8$4xbcxJE0gL-#jLu&^{0f0?mA)(^YuhJMxN-m5Rpvd1ZLd9Ov=Dg~db>1V~_rMT(yD6X$ z*?tZi=-GR*;qf`&P@=21Co&MQF?Kpr_pFNaKgl$h&=w;~6N!I{ii>lgrziSCJsWKb zp03Groe2w%Y1RTTS6hVA?3<;x^RyOxb5Pn_r`*R3M=p99+`Y&y!C(IhU=nD_W+e6$ z^K!cWKNSWfEpzUH(5>IQ2RJ9)df{zD{r9aY4PU6RSGJ|vW;M6(<`fCmtT!yE_#K<2 zS`(;(QVY`5JGLh3(R}&1mPGC_8(x;aWiAuH%yV>k;7u4m%4A9Q*l|;>kbmdM*hS=j 
zx}k;g08ECi|1-O{#cVZkjbad_#zzT35VZYbyG~=FQF6P3eheU}wK-G$zswz-9HF8dNqEWNZ(%=+wB%wY*0ARL z^SdSAhFgP0M?_c-N*{{&QDxs^A>cP-ZAQg|X)(4SCo$n#%66QCmBWzAPxKNVTMZc@ zqNnNdRzJ#aM6?XJ!~2~9$F1V}lU3XMd=V_Q4ykz@sTYRZ3&{$T2k;Nta3R;x!0~*J zBY^Rm*)2Wfk6c!lFEFzv&k5&1XT93>^oj|VI0R8F=_^SRhG_9__V~T@+o?H$wyRbe zNR&nS+m{7a0=q&9nBWtUGdp8E2AQ+kNNJ;n*B-<@p=%>HwyHd2siycv1>qm@FBtR| zX%GYv-{AF7wJT8EjqaJJoOsoX&Y7P!l=jP6F@<_aO zUlQ0<_#XUXaPhAt$&aS6B3Q|d1r97ni$5%BBI&s*4izzk0)-uJ z0H|d=2`RfqV4vgc4I(jNpoUIyGMjy$I=XQ$h|0*Mh86*~3*$=EK$pil^7x^x6OGof zU@tI}BF7@X^)&^V*}G_642=Qcarr3TkrTKuYh^>>c*C}64zK@^{QdxZBf(;QX)!c< z4yXvupX~D_;W3!W!tM)|r6|W*GR2lpmn{?Ns~#P^_r-W==f+$IEBt%Vwtc_;mH=$-O*raASJz}ct#qt5@F zI-?l99_DahD2up_&xLH^jt{A)LJLUl=Rw?E)m_cuqu;PasvIOPormW%iL&%4@d(S<(Bkp zc>TY)Nq`Hk3#hqGuLxZcSW3HC9lUQedoh_U^TgV|HX^rR-*129gILIi zTGaTWc*b%5Y(kRWyDs3lpWDvj!zJ(AaJu_jPyatx$kBW^0;~5{lMHL*-$#a)rpoRw z3_j@JBSdLTz`D-rBtxAU>JrxMPdcF5EEp9N5Zy-^gYV`Fb~Dhwg(c*>ekmlu@~L~Dz1;`axuW*R%R3Y@yG|M z=%CzADG3y0M%x9%RiyoBOB*#YOjsHV^Qc2$!!vaNbXbu=RhiSWsl6*del0j&2N6YA zdqpZayIkzbyELnMiDnq#rO8_bqGm5Rk)4?kcf9NF55B( z=C}iM-X#A?tT{UjKVEJ0f|wj z<(i-dT2ZV0PfxQX?7%}x+KY$D&P#&Y=9Br?#0|c7D6HFn-W0hk&uq{b&(Qa(cI8bp zJ0H?r!>__nim855yKeRs=T$NN{*$%8Scn?hAR1;CZs5h(eV{FR>T!%u$#tB_%NrUe zr<=o1oqWOtNI19S8u^DS!a0a$>~y(4cCQQ#%e&jv!Zw!m19qJ5e6qe#0?l8+Q-HBh z?m#lS=ArZ8%+U|24G}XFd!1#DD6uRgzg_8RYqfr~ba*(h>3~UlU z83})60m8C-=Q6V(97^zRc<)>f%7&YE+!M2|f>nPsk!8j{3Z_&}h!oc@5_7 zXH{GtQTHem#jDf2@Vk{C8-M;rk*0Z+#$+CZW z_Yit5kE_iL61xC%I&?P^n;y|ricJg7-qifuR&Nsv`HhoQvy#JDSIAPzPqeZce^On= z7{DC7XXix+FRf4GOP^(3Fcsa6(IYSBsb>2y!sVgti7_4@eEtXP zwayfZgLhVF=J697TP*-g`z@RbsaJ4#s{ZB=*HhAaKa%lq(0H;@$Af@!SV?@v#y$KH zbNiXf%tkY~&t2_-4Rtdl#*&JEWbA#CwR<~rW7M0{1sR^3>f;KIZn$`$IrVxMaCAEhgFP;vN4vHe^S)hc#))7Y>Mh6Bmx( z&(hH>k8C87t5D*4ZeA!PJq|=xD1X8Q)YQOvtYcs8x-y~cy{Mmm<$J~8#T+O_=xphnH>i zA-wM2@o7ALfXRse7yu>egf2)ob*awp|32|^XyIz^wN+$!*dY0mRjpHvn@s>}%X zwFrIqhEEbzw2rMM?89GC>N}$DTB_)GmQn{ko(YbiV^?JC>Yn&;o60Tlgl@Ndk9A`~ zDA6A=phAZ_Ybks8-KM{oxjvn%`R+yl?7Q?4ZUaMTD0iB5#3Dj?5s6(`0bkdx+_z0r z7IJ*+)H>d^V(L&Tiv$bYjh$z;8J1DXDfJOFQ9$_!>ri)LkI1>8xs!Q#>V25APy!HB zx$8}(PG@zC+_+;{^^oA_Exim=NP{G$_mDd%6Cs2VxZX$VQpFNgI4Y;Rd)r(7+}_mh z+dhIK*3CVTJ|#8Dmu_ZwMC0i0Fa^XCf-BA8?_PG1b7@3MDcy}8lIn^C%GzhzB{^A7 zU!4}DlR*rIs8}!YKA%bG#A5HC?=xAq7-D9p1KM#fXR=lvLxqNITV&oUViiL&W!X^eAO!1!UXwej zx6zLS5ys9ES^`$LHU@eivq9Dya{HXtO3p%t2g;Pm(px_US-wboV_;_PK*K`wss=4{ zps$VyScfE0n8G{@o438M6o%d&<*f*2*!#2$ z&-9x^iLT#8OsGBq!DGhw>W6Wn0|V%e#d|@#*DjxuKt}1p zTur4e(zWA?Hz{Gf7&#z4m%;3Z#I4l^Tlp2{SQRy3o>O)FHF>spyHYHo{Zvvm@U;!{ zU$uV|$^oqFt7p4{JMfSE$wRG4_Y@)6$91 z5_2)9#dltvMFxXAlV0;vOU8rMI$tl7Iy(A*ysjnChrP}Lz=FLwgRO_pGMWty&T*i! 
zfC`1yji#`Vh0?+X+= z|3TJot8+cu!3=M*jkHhlOiIU)^SNVUgT;JdAYqlInksX2&hNfyT?L;S%y5Gd0Oiu3 zwZe^Gmd;47?OvY|7lKZ;Y3jC`63YlkCfluyc0yyoa1;ZqALXpsGVM{j=Zfr{ge*fH zU|z?+Ejtg_bka$jaZca%PkZM&dC<&Cw66{FbJx&~X5M^0NdYbQ>9PF3d9;Ga+AeQh zSkP-5*1Oe_s{jCN-sW?Pw*LNDvC}j(hDcWv)ElNl^LwMFsTz;XLdm0`j<*PtfIU{0 zyDZ4Iyqa^hHvsnXLx1MaPxjr@Ug2elGP+RKV7(%?=sc;c<*i=^MNA%P2&ja7K2$6u z_TKOa`7~DK)AS?*<9ZKra*xEbg3JrL_79af0H2y{X0O|x{VRbX7XwX5pZ<%X6DQuP zVz_ROdOBV%5TwIK4}qR!^Ty%>Gz6)u%riA$U-fO_cDqFNbmab4QJ)_Fv+^X~f`i;M zjXqiSF!txxo#)tK_k+Xw8=%ZKD@~c7Rs4tfb8qF0{Zwx)jY*eb7cVkYEf4AusjH*M zlOjmUBJew^JaKL}Z+A1a;drK3g`&CQRs1i|9(31R0c6>J_90OhK|;qDsz{?pHC9KI z)-L_=)?*9B0?X{|al>Vv6H4_lX~MhhQckZF_ZHe@OQCj{v3{c zj^m{}Vpvr2TpDy*QR*P?$UAGBOD`|ha$YrXw}L*mWRju8QXPmo!FjXOm2GtJzhVfJ<{Vu}${X^B&VgGF$IY|pGS9;H6bZd~K(py6z5qp3 z4fy?SMHQA;iao98vR!ML6|OV>{Z`K%=C)9(DCX{LNhf0;f7WRNa+ZQ|ID4=!Ucgqb zZ}0pLfSulQbCoSk=r`iLBF#l}g^51t^`$n)p*o!M!U;yC_UXqnjElL)4NR|VC=xBH z*a#H7BnWz>;M?TNJzd0rWZMGo%=$2D4frWOAdlQclxAe#L;Dcc*LM0wzB@xF5y_>B zzR8v^QB+REOMi2e`txX-|!Km;f~eFIq9s zgg@zfi$CdAWnUdthyrSjKvbQ0SP?y_ZTQ9TN$cZODh$MgDSBiq)JA#^42&37z35<% zBheUed3T#WBbk2DqXjRk{v=#?8IfAo@jnRkX9Lp+8H*kECQ_2r2WGuU%6U+hNi8<0 zcpa*rpD(Nndchm^s6KiIq+ZNydydCm@hsYJ8In_R#-N7K6`KY%j;Y>E((?B04<6_C zWqm8=j3J!AuBmLPm`S&9Rb|UR+|3_#Z2$1Lzb~j#|Hifnsn-(TJQhqSIGNgfvegD5 zJG4E9nD3Qr?p%?N&7|?fFjj5N4}rmC_`Nlp=f>ZUCV$l9qJ%1JMC}na?X#e_PI4R3 z3opHLYHqy0gHBa+|JK1l__&-LJ{``wmQ?7K?Ahj8L_b;s;HgTxsY`(wfd&bmEJ6tJ zdLNJ2ThOViAp32_U+i+4^817leZd;M$9*}ye0B#MZk!I0X1r#3isZUvjUHj|N7%IF zHhFmb|P}FO3v-npcT`Uu>|KLHuV+x?Zn7J#6%J&|P)ikjJVWytRiU{!1}FG0t^r zc)$0w+gJc7X>y)xm3yA0)O+=ElUx=kTZ<1KY0un(dlhJD4-rO4(_FK{k)w% zjL6NimPZQpuE^B(10Cn^+7)pK%4M8Kl1M$8+O6)qOqlrJ#c#jHU{ON3{dy~WX2rJgI+fuV_<219#_kArzM=`KPt@{5QMD z%nPmcalmP|ci8Ccb@b;5v%f6{WI0fBew8_UdbFHOv*nBP2d=G-CHw==lEIuDb=aI- zCj;%W>yf1I4GOCv(WyW+J6$mXF>@cxLs=T`0SiS1g-g+pUJN*Vje3Q>s7ipQDV-c4kn z-d=7;cVbwiPnN&zYNOz59R9?dkUk_6UYxtvH(K4=I!X{K^n+u38m;5iQlFZ%maYQ2 z#fc_6Nq~=g+twCzKRnd5rXubf5QgrqRemT)?fcmQ1;9)@<0;-GFh2Od0{GG*o%dB7*@%}E9`%J?Ni?x;vx z5sWDq>^8%Xy~B6uCUkVmCBVz2hI()JSD!RTf!>*#4`YDO>Srb`E7+yj${_!V(hDa`qBANo%tHy$R)u{ ziNvfrdb9&oD9dksuw9BC$7*OtizzXSxi1-d<+kT?yazBDL4A9K=kA`Il4o%EpKRxL zoD%rPT)esOgOB<$3>>ZA;yQZoAKM)Hv{}ta3{jL?%b&-vjVuG9x}{@|xcE z6mhQp97o7adHo}PI<1|ZkR$YK0KdlJJ-yHAsIUo!wQ2j#(vEKdk;PLy0dOV~;dKM^ zgBEjKoYO7Wlf-_P&<{<3-feW~QRD6{xlNom;d#*yWE!M1=FL3!qep@-W=~L}AdAKk z{A#At93BlhCS?g`O1t0ybUt)=DS{R;P%HWVPix-2Tt z$HUmWzR)*l6B_p__fZs&8P#wX21eyuo|_R(^&Pc0I{mNB0&{nvL_6^>@LN96x4>Q~ z6BVXg>sVf$lT=VMqyn~uE3G`YQj(A^f=#z68 zE|x_GU@_Ti-eW~O6In+Wk8Tm2e&NR@VM^OU@coV??{D_+I>ip=YF5-3fH`oD@{ucp z0Suu5t?zgJ`>GD^`jpt@XfV^QLR5d9sVb z<{i`0iQCP-j#yCrb`uU`*PUKHPDbCo?HJJ5Tb(c_@s5E@z1C~!8dm5!wwfzT*Yj6m z@STIyfGo_8vW}&%{s3Re>}8!-cWC>k!l0)Z(oDf0a3bTB4z;Dc#y)t^=A2@9C1Ak# zouMkMrwcVWh&;4vLpXd}`C!yGm}&;iOt;3+l~M_xjy2;NEmv%#e{lFY`xXYWo3p|` z*@?+HJ_#|;z-?dM~VSiL1bF3TwRr|#cwVxr3gXrUrjQ{)fg=DgosA7ZICYQPkw z2LOqn^(B7UnmZEoCu65J7Wk77w;m~*@=Wn)k#fsrChu9o|GLylMO4;aN3|QE#4A`q z2GL=v=YlgUzy0X5x#IPv4HI!|PMF^XCB>MZ7c$Iil!LV}9H}p9&M1o%3VJsP7^9+4m&@S#NqH z&uEu?4uK*8vN>)Rqvx_$xBAMDsuB8dXR?6pwj4T;)*;}tJ(ZDb^2zbF$BbM4Zt$@5 z$^9~srF@0Ort?HK=g;@3VqYJ1tUHyiz)t7gyOJjG3f))oVRj58$nX}di2EOA>s4X0 zAh>Sc;h6PG=!z8vIJKsq%Uq~L1Y1KCe<`wKp+Sn)z=G6S@8{5g_Oq?~Pq#KiRfKj5 zA7X7&78zLWS;cO82&Ux5%~uC*Tqk19ahg%j>ENa&;^ICK`&)f1|E%SHo(kN9}h)Z*U9g@Vyu2(dHO(v#5 z$fJZ{7twtOBohSO=CX-uBO-@DcR6y%?Ebytmuu5`Lg{Kgg-tm_9j zaq$wpH#sTxu6L286}s0hck;bON&_xFWei%}pB=et?cI%~JJp;jzEW*|GfH3PYC6mwcHzdf=x_H>e~OH4FKd_eQdOO?oIqo ztK>P1zt3R9*D&$233N(3bHx*%iG-l532{E~UJH{O#1BBUnuBz{DBcdwjPwPY4Q6VHS 
zIS9@YuQNzlhEJT;Nj>$}uluHV5Om=nJiiR0wGqZ9Mu}F_X<1;e0>fgnn6|UKUPMFj zBcYz*b$lNGk&)Q&iw!W`(RvG~gS&Bh7I&3-Lk_}{$=m8(GZ3E!KkIKTt$2>Q0 z`I)C#BYC0XE{9KkwufO6NnU!So5{ePCRW(nH~?#qSZj*&2*1Lg2|kV9;oMIw5(klY z7+3Stgc&?NnNv@24Co@r#`Himm|d|BE4p2Z@25SYV7dtshB(MvY>`-#p}~ZTrhmA9 z^AM~MN>0dF6|FE7()4KgmH@Qr`^f5g!lDU`c!00mlXui=<{Z$!5Jo;E-Q;Yra;V0U z7VhW!0FyTTruV7KEMn|9f@v!;KJlFw5|m78l8<6Wa_**wS2LLmUI=l;w1@A*#L}}M zIw}V74-A#`OiBG6pOMnf(c9N}Wm?DJNg*if@xs(=A1o-`2bNH@U^`*DNt&B@O!uyE z-=$Du+!*|=sdT4AR@`{xD*1El+L$f;6_`q5eU+4GVuORqK~xym78Nx2mWm^fDmW`f%nJ+SIB#jju1@r>h~@(~JD091bA{=1bc8 zLKXI#Y3^hgCtF|I%ht;w$(Vs2=i&a*28pKPF#Do9@_=Rl%rSSF+J|6I=8rH$n+;j! zy3p(1$*-=Lepen*Tz{ic2o-v8G%{(ue1U_M<^j*vXOd7>bINd3pbY+X;R8W^F&%Aq zWLU;0>Ym$9=3cQ!6Opg0HSInwSda&FVTOO-B<@!uvHKvoTt~lM_QZo(Fb@BVd;%9) z`&{>~(@;?zn06kg-X6N@y=ZFjIl^vMO#>R(sW56m{ttRhjd9$jKlsuhVzqSj6VN7!V$2u%~hK?C;d67yT;K=&uPY*PCN#g%HCWjLSI%bFQ@W#01Le| z%71u71DEzP2^RcC9e- zP|(x&V`x?eVyB;If9Y6cf19N5DFr%O%_%pflf7XekNK0`?2urK>2P;;^Zp=6Kr`*W z&5mY#tK(!yei<@vx^D?ELi-^j7i^mA_p4tmC?OnvR6wPaYr^1)m{rN6|EX| zJ%Su}^zYv?3u|6Wcz1^&@GgvQ<-@Q7%9hWul(HEj?O& zx-rN7&6C4(74YA-yvnKeW;k(+KEvnw$`IfA_E8ALXsGNWeV%T{rWNj+BDd$DF!F8S zRj8Z%)RFx62=49`1QQh{8vq+57S;6y3l>6yJuQex5Xyr}9V@~(rGLhYaS@4teQ2U^ zYar1ApRjerTr*Yu8&bF;<5CdhENHZ7LdgtB$i4WP6$h%;;F{Hq;u)WePu9uqhjG4W zBv4=j;?^2BYRe|5I{b8~sd(H{8&DZ41T-pS--A8Jw!68|&Ea=|7)R}9yN6Oyajam( z)!(Hk8y*JMpG8rbJ55>Ic8H9>W4BkLl|BmmvQ15jT1ErAC(pS*jrpT4HKZb9eIwQu zaw5W+3iAv#Ki6AVU@2YW6gXAjEXWgZU1K>nggP3TBg7I-I0!#+^~$+~wBY-qgSix4M zJ;dGk%2nVlIs5{R2!u#oTukzmU~s72lLj9*>8PhG99y1_5X|ingybpL?5%V?0{_jo zs@(yXQWz?3rUZ}ootN}Am((`&mzYU_4Rn?pt?YuRC(P!O4xFnx<}GKU*oOm$hta^1 zG9vTBeG|B$+StlYpfGhX&j~oEvB0QJH)Ta8Mob?#5fEV6h-S_G;PAX9V>6}t`t_I6 z+9WN1sy|57VDB6T|v|3pJv7PkK0$g#OXvY@Q$N#_jJtq`I`m z;cKwJ)mnBe`;Rx<0;MItXragc&!Vf*t+#UG3bghpA7b`9`6oH%AuKQ>Y_xk@q4QEXE{QZ)fWhQrOLHG@rRFZf9?5X=# zPjt$8X|s?dsErlqlLhptbqz>P#ZB)!1TRc`N3mR416s@H$sKEvZvbciq_|yK%@6k+mrO$=62Ko`mVo#CY>b8h!+2)p%?&nxDdLIY; zkAvp%pMhr7me*)SPcNvvZl_(}78Y=PpZ-Eb-i)Js@VFGPf}uUL`apN%!}KbzpCfn5 z!OJ%wr>asTkr%QYmXo9u5&po+JEdG47_kSS!>g#Q15cRw-n~eLiEy?OH`X`0Z5nU^$9oqf!}t34bE3?w=C6h_-NtVAFg7n1?y&N9qIQ) z)Tq0cqC~T^TUUxuUiYflS}Qz>!URy}?DlccEEsU#9F4V*x*V|FE%#rf=7AcNfzO*3 zOZHLTiO?!6!aGwW2+GVHEKTj&AIbeL*+*M&(sY&N(&fSFigjYL8;?YMj!RRvXIZ&w zFtdQq*?=x~@rTZyooe>BgCOM+;e)F;o=ru-ZmnOd^-d~@)nYA+L9-)55H+pba*4An zbdZvGfvt%&cAxvvUiscz z{N>ph>x)0l0FFwQb(B~`lI~C`OV~xr7*jQPI1&#m#r7&Bvc`B#V5gp+0{x~5c!+wm z+fMietl_g!-2!B&j}sZFwIEf`iA%dYm>Vx|sqQlB#TMs}>OvW4_bkDFGJG;crmVAe zb}X_7!YUaL4oiCD|K2v`xCyQbS4HzMZ~-3`q-B^9@7Qd|tO9q|QJ_V8X_v2UMw$PH zaX6aQ{H7h_-xvV9EXG`d*H)}76PQdv2p*!?e7cWZu0V2VyZ$-=Bti4{S`5gBjV{n^ z(Ae+rHpn3FpUTkrfwEo^ehWkh__aJ=-#j6?`TQ%k6LX?dJ%-R5x5!m{XfC&auAeWDnEq zPJ=Ed3!@_v+WQ>rtfP#${b)cnjSH6k7&2wgFSl6s_;kzX@+%<8oR4Ebh2wSKOgwvs zmj-CA)eT;_qW;8Xy7p8CnR2Gv)*PfbV+ca56S@KPgf9k?8+;4k~e6DN-5pO@* zUkFO{+Jg==wmj5s`Rj|P3NRC+qOL6HH01~DHQ``>9;@Qf5S9by+gPgHt&7iIatz}yn5^L~8(?~+jSIm)Z&$94aW zB(xaFtSvHh$`cYhD_A;3ju#zkXw2Q7zZk$_=OvT8krvTPh|{C9bfG_G9r7Z{L!21d5^)>iq{>1Xjt9FR5~0>pW|*qPOdM z9B2hiyXV-&=3w54cCssio-hKx1J;yc(JgjKHAGvL;hXAXUEUn6*0i~r7%D3u2jowe zv!x@?=4WSkTTVAF#BLe@F9a=@1tGEren@TxDrBlU6cYv4s1m7CIwd*96K_PTwp+Ht z`eiF&Ekh~x0-w^Nik7!OG_HYXU4Hw*JgOu}5p)j&**Po*u+7r{2dS>I)Yz$f&RXUX zpQys7j%*Fc+zi`BY7U1oNm#G}ZEIX0zWF6&xmjr~Z4@{nE<4HV*&MPvdlEbr9D-O84pKrUS6Uqe|6#wBU6d2>pd20Bhv+ga*0GX;9p)!Gp;Q(F*jyY2uBXTs*+;sOcWARlG|1 zSss_{aWKVwZPXIVSMxxSDD(Fi zB5o46y^_Wycf1zI_z;*P|7UHGtJj4_CTjJ|S2aZ%^P#&%XXSLCGtf?*ED$OZH_J!$ z@smF0Bvzj6-4*s6(ow_6lVmLV@a5K@ySz*7TLCahOw*Sc+^w$0jk?rXe)0y6MRfF5 zSh@>JmL?4>vj~LCq3`~rFaS?w~rKsWIj1u{zL9HTG2x|z&ny~`D2 
z+p)q4>x=-{Uf*}ubUkyvn{M`1yRebPFFN^%G{hME@(D8^jUo@##qxC94(y*KjIY^@ zE!>qab@4E70&m^Byd|H>>G^G^i@_Tp<$Nb-+A?uY_XV4+AW!@o)On&qW8bhyGd#-) zNXQ(XG)HSmUntSj>%AB(-&xl7Fm2j8c~WPQ)>DvyL!ILY4k{Ijb5z;p_WBctlsiju zxHv8lgza?yLsz7hcxfaa$M3wGfJM5=hU%%oj`9e@zBqJ7oYybT2NQqw)~0+Zs+Wzl zbg2*B>~B?>{B>DXh}es%k}^z~E^_=?)je11_tvqv06Z6N)*@cZ@>?%ZmhY{LZSQ?B zu94AR2vyc|0%4B4WDAIP^wd>+$>^uRmL06oQ z-v2vELH+1CpqzWUGP>hK0PK^FfR3(-ZTxsE&T*&kXt>jw(s_mRlCV>1xOew`j15A% zSn55KYiAWa&^075AaFa^jx*NjkBfEkM!#8~Z;agw^nk9gt96=haKbhVM}vnJ)WrHF z*Wu?mkR@V%p4T(cxY}GPL-pH;$p|k*ka=!D$oGefp5N=r6%V;|-MAaXfN*vxhSyzX zv{ROtPG?FKF%7TN(nMyRMgm~og>T(NJ1|V0hW;)fBU*l9w;=Gg0w>1>^JsmUtB!iG zKI4ij|ZNWG{jL zgM1Tonmy~*!RN>#hH>j}O;Th`j2Z@px^JYI7@7dUN5Qj88^--qMQ}W4-s-PHtHS^B zQc>=`huJ+%J6(|xedUz>1rS9eA`9dU$gk!STc8kN#qLJrgurO+q-FD$4BXp;2Mt-#VTj)~2Z&+0)~}INj(JVN;3i`VD6>VgqmH2w-g) z#<+7l*N3+YG6tK$(Qu*PIHrMlekQtXL%?B#s2>7)U-q{X%Os{BY15ttcF9&?(H!2m z+sWJ4!ZxDrNO>_GrQUz}vkYrBvhwi|v3~mNj-o*v0WLppi%6iu^}@bhD%UlCQ^4pI zn5If(Xx6~<6u5;!S#@)A+gMp%NYQ0RnErH0GDBuh*JF=Jd2J5zBhEeKOYfL|q50|S z1V)7}gbf{R@+?|Rln9)HaO-Z#kZ#SQR=31>!n*>!Mf&Xoz(Wwb^#{~@n;qqux%Tv# zNaL|Fxa(nqpC2N!tv+`r_u~N|HOkj)oT0x{Xd5mQq1L}m{TcuZ-_fSxgv0)-&OPgY zgXQQ#v-7BXu-|D%u=2BDKoPy|TIA27mR|fxB4c%-GA2S^%kWoOy7e4!+6H8MTL^9Z zz%$gmkG+r9hJ(BU;Dx_ogX+8aB(1=L=uw|H3ZT@uyYofdHpQ9t_;&{W8yo~0$DWq{ z;?S3DOOx)SFvs9h8=2h%>bBp-< zr4eJi897}WJl5nN73werFKz1TbI76jBbBz$x>$bfT5!)uL=uo2XgVy0ny;)iXhrw9 zH=W%VS*lU@c&QRp_)U3eOiE~ck2y){qZH;l0Q1s5??Ht%-BIU(`9a@7W;l#;bf-26 zkpzq%dGkm?w|;CfV5=O~yX7xEKPUHN>h{~#5)O{>WG}?qwR@Ysr>R<>{3c_#j|oOM z#Rn9VFjD^kWB)W9!Y!0{NULU$(CC&*1Qf`lDXHVW!ZVudq zWy;ZQY$3jvMSHZzxg$;g!WG>>S~j_GvW;R`hem6 z@>_)chN#UW>jAXlOJ~w2Fc`-CWc>N0&ZVSX849P@5M#5c!mb7A`uXCraku}o_ugSm zc3YqD9fE|8ASxXc6r`y1PCy=-1rSj{5Y(W6O0NMV0R${`M5PBQf(WSeBE2dQ1Qh8d zKeHP@0sHt6>O=bTC2n zy|`8rWC$QttJKpiE$&F>ODJ6S4b!-nC4L~vN~5>wMM3l1qJlVDg@49}3%X8xm1n7{ z0QqH6lO>PX(n~xpdW^i;aRtYoTQl3j?%ob>4CYp`zIGoY+an_AXkkblt!O(g`pf<6 zCrS~giQrbnrN_2V7%WEO{66GHU%=I*r*YamAH))jbQcmQ4x6sFjbvQBH;O(S=h~qM zQX3)^F0em+u_0hR{?y_Ud$h_$s$?4FPO3sb2QHbb5f^!r-i|~JYx{6P4yhrf7c>ul zWkImFglirtJ-M5>rp_o2>w(goK|z^zqYDoe`?4*Ja_8yeh1nlMF80@Y;dvskTY*ds zkNAq%KkMG5qq@yMj(UYVG`tU;iOf;B6Rj#z7Y z`>Mz9{o$p^iwaKRx5+P|L`e~)kBwRVwMD*C(AnMzps|Fc2EpPGtDpbMNp-uY}Juj;@)W9!ID^wp4cXU#B!%CVAXyv+O7Mt_?VtTyHS~!O43K z*QA>~gC3{3()%>DO@AFQSH1+8dfAx!V+6dhppfTL>pT1x~~ggNk7ii-d}Z~ zk})FJDS+eOs7xD!`)_#->O3&(*OE+5w21;v{g1@-XodSUF!zAQTWw72kMlz-Z-wGhb4j2A-$98(g{7P(D0gk*^@F) zIq%hbQ;ERFW<{^lA%@fJg9icdLSTG)wz{F zLp?V{tRf#&oV~WW4XcMGf6sod(Dn5U9sQ#LC91lO&}HWJ);qD&>6F42f*p(0k#$gPc;~qC3)UJH z7du7;b+a8F6bNSTJdBB1^H|bMMF_1=to_9DaYV`=+J`(reZa;{ekK3wcV#o`R7al% zMmsqtqO#t#)X77S`VI04>U3+&8P2WEJU!;CeRtz{t9RF-h#-y05;!l3>xNPK`3P_S zaC2T;DmkP`orof5{rxuxWxb^s(3G7@OB-4cwBy%5(|BH2EM@=%h~xx$D-!&-uw;>Y zg3vlIor5iGKr^hbo{xp2T~PGXtd61roE@q@`DrJZyT?6Kim=F*S}Mp6jlw2-&hu*f zbib0E_;|siL<7_d$lP|VgcOM!dk+dO>PvPemu}*yXPqQNo%Ce(qFsiYWgx^*nYn*5 zvvi}fZL^C1ZYNb12V@=7Ju_|Rc{@)}g)b=!=DjFTtgX;tTW^Cour69Bz$5>WAvK*Xj?G#;N!@Dln&4SYQS7?_5I=r;+|adUl<~URL|gPn2V(&0 zUY()-ZjCvP@ecc;={rjJxPd$L{PAZ-5kpnco*mm~JOSYJHTpm&?$-?XOjSE;ccIu* zv3jFC&gyXVC(aNJuNlgNV(HD;5yQV(O5Q6O{!-EMNAh*eF(Xb;-^A34dF;FMXE%p6T*W887NFzYwb2MkFMRa zIP&u0Cjx=JfYClN7M1X$j1cOP^<(Rp&}u{HAs=^JS=wLWba-T=`H`fRWeuiU;7RpA z9YGa&=Qyut+Tti1*nuJ+PaM6x+$sYIf1u+)|H`g1We0(<+8q<>P2x|i$gP1EA*1Ks z_Ip{u$Ps8_k*PzO9{S=_y=9mzcsbf9#+VQXQvc?p)Zhz2l^-pQS zDv3$%Lu|ilP*`nIu@B}Nlej7EQ{hU6G@55-qqS?C)E>kh@%F8Wz~N|{7yFJ2=Q$ai zeiK8R;YFD-a$QnCek-s~Xq1S!v-y@OTeiGP3S{7K;uvoB_}zbfTDNKCd3&h+GpCwh zp4*1-dMb~g(G8)V&Tl*pI07U3pmM}LEH&P?!5)>jIw8uwM4V*)_QzW>{(K-OMr0!}D~C 
z*~U4|r4X}sJY>^GwP3Lmf{3qxbj34bBD<2(da1Byo*k2DLnlSFq-|q%*LT8OsT78S zT7^9KK>I!GP>ZJ#R97P(lh1VBmAXIxoHE@l0{l+vtn)zE+W<}8x(^-K^>(x%UF+NP z9SwnBN%U?;IcFE0bZD^r#4?ws_0%>|HCeH5ni}<-yE8$(P+`C$q36@1&ao~0VK;*+ z&G03*%dS-7mB@Qu}82~5t;Cl<;IKU9O@5|fdpty zYfmOr`E=5T9$I}S*4olE3w`2b{Awwy??n}9UUv7JfQ7u(1CS5zF? zb6)FzdwQ@Nq@%i1Y)hWw*qpdc9waJF!p;+rH z{uakLX6>$D--poDrNWsau)N?i>}3e&^{i93Pm2Z$|5{V0_sl4}3&VHD$B%Q27e4)X zqNN#72S@D-r*0c0s9o3{Pc8C)t1cPS@xb|*Y~sMm08u9~n=t~K|5(V3*GB?vgqf|&dBJIqoVsyZm6`N)R5uB%a?VP3+1DH~ryx*ev2(#6{?i zQ{Djcd8Y@$_oJRK-E9U52$NstiZ+Sh81<% z0pZXF&JGMb z*b?2Qt>5-v(z{bU_tqu%_YA#kMeSxKUtVN_^J-h)A>MsAL1?YrB9ACy5;0eRM)YooXGyy1z) zG%At%P@Q++U?+8z$I+I{jM#eB$FpY+=QzLx6?t4ij@0FZC;D+>(X{+^cH&zwsmSIi ztiegFaM3Z|wG%h8@CQnuQScFREs*sg{&>zh5JUliT>KfKjwDdI-RG26n zwKSSW@nV&0;a1~?>`kYRr(LN+i7pxk^^4k{g z5na|#Vg>9zC3;M&H<~d_&7`43qIj@=z^T{ex*jSNqb`dMP>l4!0fu+ht_Kefu8~FE^*Aj;W46jc3F)^e9@~m% zdj)1w$D~qZhp-ejF!6kmO`N7UwIuYrY@V=$H+hoNwy^1yXDS6XD%QK+T{84#iY`^} zT`;1M@jyU$?Xr@>q}{cSP`2Dy0pBN`RIflCR2b{Dww9E+yFemqaw(XQH*qGDihOBV znTSVY<)DEgEO1tp#KrSD)D5pf4~2fZ6AaEh$~`m4OhruAM_z;+YJB*8xm;e=&??TF z{KN_>3TG-u>N~4*KC*{cqXGZp`~L7*`cD&}uaSuO*Nbkzsi%^d>Co{bq-NM?-w6yQ zWY^%RuUj1E&dPn|7P2|rC$daSF6~LQ7I?I znTvFyInf$?Nz*W1LyV9JOu7Scnjx_8I?|$Ec&Zu!jVow08=l z-B^K`XznUoSVM#HUK_Unii2!td|!KWZp{#3s{*MvZ@!~kTG@wEjWBi9s9s})D(3q> zL=uIpwoM4aO3!0etML0!pJ6pitolCm?5V<{{qL`xmXB{0vETW{?U1s)ac)l_F=9K8 z5!R{EObJqFO$i}|;Ik({+8!fjzK12G#)fj*Y!?*Z^%+v7gp5WdU3y1W-iJ1gL&laR zVzSA||9;EF2kJJIq=Q?qU}1*H!ZO~y2+CLN;XO6#7;+{|wXq7h`M_#h9TZVTM(&bR z_My)>gfO-^09oiSw6TgLi6Y0S9CkBx-u3++r^YC2vipf%kpN`lG}&6 z8TX<29_XCEuA2pK93J*cprz+NbnV~~;g~VH{!~*`Rjdmg@w^K;>y7wWzEX)`Urz;sp=eT?4;MdgIqcu_QB{t=Vh}NZ@ z?$gT#@y98rAyjqL?{6=Sp05nkLG-qOalCKdJJHZ!ILnF02p`M?wH1xM_SGzxdd^iJ z%*gfQdjju|*ldP;>*mt|+`l?(Sy2ca1@tYec^6D+y5k|gIcsn17Y&n2qF-ItjS-1Mb%xViOztP;+*-m)0rd%2ou)1}rj; zb{`sJ=3gT5=7*d##UF}%Z}(*SIq792-ofSzWA8-k=2cC z3tTD0ks*#<*mdtvJedi@f#TX z53fB@R4H5T5`^M2*@rB#yv}2;Dw&$TIvntCGrQfGF}5oeem#=FdI0DL;kW;IbA}=7 zDE_im_eLhLM7*yCdna}&a|fcvC08z}aX2im1nWx4=l>Z!r|A=7baqpOLu`@=_~~m_ z^ioxm)y;pGA>-zH2SeE70O`QVgNfqb*0N}=9GM{S)B_b?esnWp&=nP9EZLI!V`Cvi&~@d$ll)DHnswmJl<&VH-sGHSg&X%gRHSR zFv-U!V!ZVPDd>ash18DUb=~Y#1BQNMMkiR!DA!AOGXg9vFSt`@tyR$qSd4vWcm2g! zBLDW@S3rk0H?QFk5zMfk6Re4#&B(#JK9jK@JL*7B&2z_R7vv3L?n)8AmeU$w&I#}I zXx4WCalWDomMI@JqL2^qt&tmie_q*twSa8bH7&l#aqEx*amx%34wSQEDGyNVM}tKJ z;?bMgK9=$F+^#;azUJ?z(5sXuV)^ESnrKZ|$Pe@?RX!`tr&!xoT&DtS3e$cvTmP;6 zdMM-3Ro9eenWcFFc2RvAQO`J9#c5!JSIue zEv9ra#l_=R3YcWgbZh>Ry`gYC<%_ON8_PycH@_=rl&#K+K^<<4CQ$D~2X}mz_Y!t0 zlR9M+2_&3XVy&?Qfis|d5wm-er?Q8sIry|TO@OC`0pkNyfn`t^uW^3A_VBNRHKFb@ z)8-<2?!m!j_3g!p6Nw^Ck_AY>a>)eqIv~dQkhy2NhMu+8)UJa|LQLs2qP~kd{GQ<7 zFOgqjh}8~i2cOHX2W*qK36!{lHGiiqUXu`>6o3M|wIkZhqo_$+;j0_j?#0`MQF z#FTRf&p_|yo7wH;+N_M(-H`p6u=(ogPQ?Wtf`cpY6gZvJpG`iiT*&5wZdIj(ou8g) ztzJ-Y@D@Ip_@gqxTFw2>)GCzx3iZ+!1#+8cIa)7+{Q@CBONPZPa5Tx;>~`xv|DZkd z8DU=yrqStptm`v%Jc*y|9j;1uvL*b{ISY0?*cwcn{?rzsfxQQsq`N7Sdg^)tbtvj! zlCa$5@C&i#ScK!y*4~Vj&d@$2Q6tE4LJ75kgUL8^o)Q*|i3yt9?H;)}oX%Ok4*@ag z#ZgnflKTvh`6MLS`@uBnhOF80VdA1Y*%r2tLDO+>j&dLrZ)9%2zka$#Rl99%rxV5( zE-2zVAFEtYlD#)me-ZwnZyy2$n?R$Scw0Or;hI>C`9aw2PW#?}{=(2<Y*7c(N?We~>nnOkXo5EhdDCX|+~q+V{`mu8W`6D&d|ZJq z0f<9yXJ3=rPxTI`BL}}=Dfxbk(U0VT{Be*fwgw;hVO?DGidUvABjDTgv) z48JRGZ<@+#9(&Sw?Rs&<%Sv?f+=ZFToR{>^BZE-=$RtR-+$3(&kHlx%JP6E5pnj9? 
z>*R)f1JKM8pT-$*cRH^=r7V$B#9qaxgOL>OJMEg3=x2swkhVMNZnzp&!y1|7y8Qz+ zHfrwKk8ZX-&@V8VcuCu1r%ECC?_a`}cO)tnE=6h}S|@;OOTfV^X)3x(mFSE2j&oatS|3Q<8()P>H1fU#9vv*$tcc*{3awr~pu{$&t(ZTj zY?F~&0J!9=(?K^9;d!hosl#nA|d6bCT<^!b}o1d7ik ztIjHDSMxW9>8;N@&g8{C#{$L}>1BHxli}Gx?Tb+YxjTo_X29=(2Hsuot5mt?n~p)0Z(*=bu4^Q%nv7o@g1RsD7IOLd9e?vKS{}-hBH50+&gZ4Sw~1=Qfl;zs#tL9ER}t5agZt@L zF>6I42j~QUE1zkqYhxOr40Xtb10|ih4?WQ~Lic<`_F-Ab@z}#&vLAIeW2_Tbz%MPM z_Bbqvq(8Eo6haQ58HnW79c|uwlAzGMn@{a6Cj*cBd-i_^*J|yjB2! zZCl?4l$rq;^(9~nO)k&!gi{XQsWX7LcC`pY>Zs5+0L=M#shsEdQVZvwtp>=>ZrN6l z-(ev@CAoTggJHDmT^Lw@0pdP%A9qNN8KBIFQ5KgI2VnE`vbwmzK(i{w`L}@R_?tL% zXMs)r$ZH?!%+?9uhSc8jz=ubEBT+{xVo;Ad58=EG|Mtd-E`SNUcSVg#x#Vo7Mw4P-#5Z?H*Ow8rZ~!wN z2_C?1Sr}jXz?8sofc7EHQR!;W&A44OK=5K->>m&Of|ltkRbZ46hSZv4Tlkpl0Og1R z0(hkKeJIFVdLKG*A{0gQd!PvZke$1jBFPTz0bk&%jBK0yLU1K>_s$={eu#fL-XrQzmqOl+8*R0Bp_;h~SKx*j!ga8;P41VM zHTEGLFl7-&Kyu(s$ii*jzk5l@F^Hr7fi;ox1_d4oqzZHb_}KCKJi+3!bM3)_+y~5IgjL zU;ZcNfL->MqyG2G`~`*x;r0%ERDwd2H+)tMNjc)k4ED&u#%euSszdGa4c^*0lEGhh z6;I8F(o4Im(7;t$^kd(t|MN?TehGQ*X0N@fd`THEg_bVzeaBfNw5qKHSLa$cgqg*^ ze!3nedWUIY-TB)P`|*3zXnJO)@GrJhm3-$r&z?4oF$$G-lM*~hIfbPvo`&9H=|%qU z%m4E^fMd6TA9KE%^bn{v6P7zD=d58JI~)EumAL*2YzIs@rdi#V>dK{yRb82^Rs(u) z{&MqJha}q34PLR2%jce-%j`+(TC$FwxsQJ>6tSj*ZF6ych37Tkz|pCRV%pA8`oRW_ z#cv;>Xp{I!cKguVeypnBbEsj(vZ|ap;R}~ET6p*A#0}AHu?mG5>|-r7|L*;Fz#WoU zTuk3>S@7RUgn_cM_Dl8gRk5`3n@zgoi?f>V*Lh-vd37v{I(?OXPK6k8UE7Inbf)CH zg*;N$l&m9B*kLnnhA=IN{hz!Xo#0zcK=ebFf83KanF;TI&3=q~ zbQYDQGt5wb&Jwq(#8>2iFq%U&d@#lo1Lnr__uLqOxsj7_oL}-J zhc7aOy&0O+<#+J5;n%#@`Fm#nr|v9*ahTqR){g==tmX;GfJK(LVRq?%?L(s%Fkuk? zKsl0p=n-c51hOelLq-1lho7s>GFP@Nhuya3Z(k zp`LDcbKV zRihkyHk+`dw&3m1`3 z@4%L6?;qum$iZX$erfBe)JeCyM(gO97H&C>t2n3GV(8rCK6ia|CD`9tsv-X6orKgV z&f_cM5Id|y3p$Dzn5Xn@B8zG_^LGeSOl0XT?{$!5^mp|qEqji2hRe8)dtjRIx?JS! zx6V~|9(P{vA6_bVa2A(8ozCF6KW(k8wRZ_R1lfOAKV+P8quV~y)6t#N0CziHriu0FB13Y_94N?5oF*LeYaQViy9B(COPW78Wsv%us|DKR3o<0)z%6M zpq{UIL+dQhQ`P{$E3PJ;a-~r3Ls3=Dlzz-~F^IT-?j3QJq?EgOe1-DX zd%iaW{xH*R6v|O`a&qv~V)B5^%Yml@r3xx;pgsX{l~BFIey(~Ug%WymRY5qu|6csC z*@;-cxu!-_kJ0DQRyk2clc3DYW>du4>gx4oh;8qES?y%RCp-ic3b1Z<%|DUE{$|QH zh9X*`qDq){%)Df8B(iYt(YU+Q$wl;_pC#5^5u8)v;ojyI)AVYWceL<1OObx{oC(_C z99BnJR}oQ?0lDl$%bzwPb~b_VSMl#d&-3c*{lp7;3y$FnZk^lv?1{iwRSbQkF#ws3 zV%yyfuao&n$A1z0_sFEmAHv)o1P;NT9~j#0#*Ktl30antX9oCfOrxs&IEwXzoS=0a z^Jnx}aZqOID}}pOm8<8k^Ol{i#8Zx`f!nG5L~hmZLo3FxnMk@opTC)N)ri@5Y&eAejmp2UWZgv*!DsDqmQy;>=2HjD1zTw4Y=&G^s>O);2YYdIk7qEkZ?F#9jXb2@ zc8KfQ47LZ)><-1@k31K%at|ZPkup55m}{+{ zP~{3@z^{*oZsO6vdScQG)A4Yt2teVSw z?sagNsoFPX%==rDdmVHt3(#$5Kw5hJ^JVXqOJQ?A!RSA~z;!J`;{lfX!5mu#ipsucB?w_gC((8ps@5TlsuUxd5;H z#+q4?13#wDLk{{u4B2?PsD}SM%Sc&t6Xbpwu7gNz_t6U4s7kZp9ZF2LX0v%{FQ_2I z$D*o%W8q$5E|IIqR{gdU)!6(Ru6qkL3J*_mPLA!=Sf1LwC41i|YqV0;e!?P)$WtCO zkPJHMv`_c9Pd)QGD?UP={*@F_3wNTn;S@3{G_^9Hm2)2IYn{?K?NQt%9iTC~*24uQ zALw`%juqjj4k{X6Lb1N3g8R^QW{ti`>9yNOqNUp(-P}|Dak>C49M6E>?qlHXBPO*; zA$USryA7WFHhu0i7*8eEu&+9Tft^!d-q!7oTrCJ+>@bj`lP)TbN@Qa} zfR2rb1fZr$fts3XrjRg{(-GSyT+MrXz_5-u9>8;iu*yUBwr4V~Su;X+0#Y(ZS!OJ2 zM@vr^FBiQ8xdV(j)h@ajroQuN8m!!JE4v!Oxt+A((^OoLqU4RTyYg*n;*7HYTrh&? 
zx~hEL&8qCoggH}`ZN%=!E~v{zqd8M@gs%@r?=4q=erw6_LVh3emJ>!Ts}I6BoAiz@ z{6`}VBlroeFf~6tM;)F2ni?~k+M+B)98V9nGCe2vDBi%7`#I4esGZe$iT=n5L!dZ|Q+6eD{IDvh zhU62aD~Cp-udj|qkc|})_to+3Sbbf*?5BszOUCi-6Unv`M1r-rr%kfw`4%1*Nzj^9 zk;IULXPILie4KKxhm$-wtu23sC!9qye)9#x++*zyPLV{ z{_U%|@+qz`HHFwvdps=mNgBA{qGs7uwxcUB!6pLlSXZSYS;lp>q8Rv8T7@2o+9=>% z6Zx<|@N8cCYZ?s|lB=Nq|Yr|D@zf6 z#~(ttl|kO9O0qdMG?8q@eabH3 zYGw4|s(}<*vaHxY5jsHshKLVdM<`khzrB_;u99ggh8&WzM9QOzfDu2BozK)m;idSV zs}C_YKGpM{P8N@vmPFM_uH6ct1w}Hhx~io6!$d`8%(nm)2m5q_5Uhrn4M4=~$l>|B z!Ny%(dW$qFWkM0Gd~x^_ zKC+45hqNUiv}Hnu)q?_#4-)A~A;V&I`IhO+0Lud>j@IC5^)!GJQEB_ocorO0X9@aK zEV1_A8a4wOwAvM&<^1xt0$A02o&SbAe}m7_f8(iDQel-@xG}b1;igl7FSkM_f%AC4 zq2*}jEw7p9_+{a}uubG<_dcXm1bLI$VH147Dn?b2D1eTiF3JlWv)XG!F1)7T)p;BL zsf>NV{3BfE+I&Y2OT%L(c7|_^#CQx&V&?+qEQM9pEZTstBGgp^e3b&e!59D^5%u=6 zcdk04Jr7{a0)R0Z%|uHZb-rx1>c{8mr+!{d3s_#4%ro}ea)cH1SB~g)-~X{8^1;4e z_Laqg6x(sEfdZlntBl{iQH((ikW-QU)x^x&g8VI|^?m5!NPp!b-BfWLfcyX!m#=)f z!CQI~tBSf2Re5zZ+v-xV_QOQ9bIWz}$w(U&!nT3;Dp}YlLAU2^Rf%c8X+fbbouvlP zXv9ruJ>qHkOSyj5PwtW5(3B5Z+}ce>uI!>1PZ#zE3<_^%F3G$3e0K>88!8W% zR_E1en45blKEH4>=FxHGy&wI|3mLS;EMiwBrG6gsD1xMkC!JH?3w4}!4qfR5En$qf z=$`PNy`p!I_r|24=#X+Y$I>ricx>t@=TtHS=nV!w82b8w(&4uOfgjhEe&2_PQzQxrc#D-)U?!Qo#IF2?TF(}Lnt3{~mIsYJC5S#2EA4Xo zN3%ws@1=y{@r<5b=Z4yboPAX7_S1YX(g$9>_1 zuyJKnhU&2am`T0|LbZa?^=Ii9pUae{pTZy4Dwo6@@g)u}xb@Zz9E)zJd&(y-Hl}Na z=tVh);lAO8xQpTM20in|lF@nhfiD+rXAXQhvk2hJvHSt}(uX#is@45R$T<}6N8C#L zkZZod<`;5K;E%$-!mx|x+`(ZPFStXkV<+HE>KiyF{E-oh2WmW>e#+(v zZd5tjI`x?4NK3t`*3Ul~(G}**o-|Fvw6luskn2WjzFo?%{)SCH&!kEFoy~;~W4<_W z>QBG#*<`yDJtxrCwr^MW3h$q~rrcVy5Wo1Ji^KY0z-1s?5|saY3*1VuloPEdUae(^ zP5H5XD(Ugnv9m+-$5AJ2ogJ5ca#xXOgX&8%RwZrxXjlWEVEm%#X02<r8wP?h8_6kuH&OMdP~JY8IzpI8%!xqn~~ zpC7=Q-b}Bvbh(uh_m~=tG)0#NqtD#nSLV~eF_nuz|GP%0UpJe{RQc7*z(uJ0Qg!f@ z)hG~O311>1TS(_x3tF1+rDaVDbMbQm$qyZ$%*j@M))Xc8hCTmCrYM}mDkAAj2WHg- z3kxeF+5)bSUORKvN%Dky`&;7G`L3?eH;uEfwtI^E+%6lKlpFQ5*FuD+rQDoX;~O!@ zkG0sJjB(o>sQl5dJ=D?Q+6j&m;yO+__PvO^_3V=PUh72D)S8721OxT0htQZ8kL=f% zScm$}nkQ@d+H*=1@3AKvsDjO+3-tW+W54yhwyYjOZwlbW`Jbz;j5!4lf%oh?3&yE} zue%`!R&PP#oFG^|zsW&ur%)ZiWSmJsC*q~=g-@k{4X(Go@%=jMv2&%Aci^f9A&dC4 zf+eKn=i0Ulw{H57YVne+M^uUHC!Tc`Z$pH^-}$?v6Ul1Hb}HB08NLZ>9m7j-J~RG$ zMuE3Lo%db+0JoQlBGcg2g=C^<{J^uJqF>BP%&KCVjm7eS(mT5#P5_kN{U3ugPO}EW z;n+^Z>+OrXBl;`68NZ><(%^tz(^c%KYtQpd1`;>GVwWzp?6n9r9Sk0HllAsj=fS-2 zl}NK2=BcPCC|D&%-2b^|K{$(B)>k@WQ4P?c9bv8po{Jyn8zB138eSHqxs`eJEw7Y7+Zy=L=raWj8s79Mt zp$+fvu_rS3;h4L}7Q!m|c!C#2Hu0_%?e-0e{qqQ31Yd0G_yBj9PW@|-SI)p`EqJg9 zS8)H$)WW~*oqFu}&MUkNpqaC@TZ;gCh}1YK&T9FnLgB~w*L{>5p3kbZ=Q~jXfQzZm#(J0B=19S;Qo>NF#OT+b=dU3p|Ps9Ahb=x zExCvGZX?RtcPsay8|+E-ce517^L!<%N&;3_yUgV9t_5Fjr7!mLBzrTf+QPH`4C~OU zL19g4Xx@xOKkHSSgY~3NHsFQfSWN%&Uw#KkcBqGkt#;HrV=E?xN)(NhMt3_ACB3kG zug%qOrpY4)$K;P?msr0!S0PF_A*U2ALUqLjU4##5%>mF!D$i3Be)Jje$&OE5OC)ge zY~)$hy4Kr|C!G0Bbovpa0GZBino|IuN!E%D<}uW)W;xf3#jG0y-C|=X&rp80i$uX z{fOgWw8%ZAATSy%D8P~~(?gpqRG5}jpo#5~>!=&!28=O!^q06V9aCwzaVSP56r3uJ z0B+d9G)x?+ccpK46nFh&#GP>atB;eG6Xn!OUdHdqSiUK9)xO_aTjFbA2M0J|NpVjP zJc*6B8?%p--9GQ6bMhOEdW{a1swj->l=7Bv+ck-u99DbB$WtZ z&v=}TTOdcRS}-6Yff$YaO^nP<@;ec*m1~qcn9W|0Yn1jU?}zNne!%Ed1QlhaZzLe1NbnU8Z!efA+`n_r(+?K{Y`ggmci!mXzNMZ-7e<6k+I@>K2n zrUKDG&|yhdy7$X^Tjn;Y{f!}N9{^k_`@kumebc1x4qHB+8X1)o@;;4m#wIE zyq~h9TQjdu$Ndz+Sxfar6NsRQ)qx0d6vBvq=H>IPaLn-WcYAgF&_X_?Rj00c=Q(EJ z961@Qyt~SBusKKmzBzApB5;59t|Jb<{BJwA6Or=gfZ-QEtiqo{0sQp)(9{{Q$*VWL zp{41+i+$x^cE&W{LAc<;?{Gm=?k==UzqJef#k7DB<3Ad2U?;>{4#ZaQ@!!Pup*o-c zUu+@^lyCj70+$dvC1~Ao;mchyv=Qd>+EnXdQpy=YVQR*x@CVJ|`_P1A6`+wYyArnW zP8A?@CxiYL1qm}>|4qHZ{Qnf50F>09q{Cz z1NqyBn6~BCu2mxY5993~@{bH>%0CJWE3@D^EWMG-!nv(9qF-RIbA>#}N@#yJm}vbz 
ztkNc1Xo4(4l~SL35tuB~w@n8o%hZ&&{_ZD^sW19}iJ|{BLX4nZGrtT%LPApcAS9H- zJjkj}z1Hq34PtCD7e~+&ciVhiu6QH-eb2}iexL0f*S%-4b3QSLr84)6?k+1hn{&?? zs4Ocos7mP*3k*Q#W}4+rMsu1hRzA8O;y{aINEs+^CLeOFUIuwNO%V%2C`v%d$o657 z2@JgY@h>p<1^K(;cUO@cY`SenrA$1Wdj-6?R*~!NQfn0R{#!a5VZDNDoCt76l&r)J z()Bl&Fs?%LYSRu!z1`scoo;#cV?VL&=YKjJ`vVLl;_inEE9KZ#rIl?z8QAoZTBbIM z-`3*sM3m;4r50JhO)hUVhb&RMl*Cb|%x2bbFVpy0M4#6O%^Ww&r?T(e-+WcF)(A&U znvg1qld2{$9g@lGw*9hGC0-Kq-Q*b#-e6x(qPNl&;to2rTZsZ!_^P{EPHR+myz*fZ?&du$BeQCnpNMJ za84u?SuAe_Efuv!KAi|?Bf5V(ZR|cx$6dxj$N-kxDTQr4)9ZGR{X!Z8-%?>)g(lCu zG2UYxoR_nDa1S?Tj~Dqx4&s7`PuRbtqp_*eez6_%Wz2rKge%`b=>8 ziFF>g?Dja>bWU&|x+)Y}d+n5`4X+C#H6Ogc+;x5l#B%FeP!8R5hg$G z93CFg6T0ndjnxhd5BK=V3AN+fq5`mq?gkl=Jr!O;!fa zohAVGqs?0k>LJsF`Hq|v0c0+z&5xOQf^OQw*fX``s+I#+ZmA9L>e|0v{BT>0+0&`U zVQ%SGNd1|qvVsit*W!n=ux<|M#I5>*IGzJYWe^QvjN-nsA4oB?^5{|^@G|gZ0x6t^n-uc3bDAn07 z{{pnxcDmu`?fDp3lsi^Sd`TWeaf8b#H&?3w%)aDW+< zZ@C(OF7UZzzBBIsX*GvD?7Etq(@NyPd&}5!t(jLI+q9#shcY*eMwMcn6Az2Pv$kKL z9N(EuZvqFvNrz~EtATJzZ(^+{{?q?I;ldirGp>~Wq9-3$Js!V6s?9GAXL+MS{bXJ1 z0~}erMAvSWL@VAa?JGWWc;U)t_KCm_R}iQ9w*Bx*h)J*g2L~>#=eGtzt!c*e2m-q% zMV*LnuP$2q%4!JAAkY zq&P3^rO1CBSu3z%V{{kp=|u4V6%75Wr|bMz0Ghd~GO%e2AlDV%=2*|o7sdmV_A%aP zuez3$Tm>9>>9!Bb5fO5t;<;+U;NqIOvvP3-tXZQuOuljs0ZMYA?zO>JxBB6cbNt5b_%{^)pyWw zr>_8%im7E{rS?q5OWhQF6I^+E+qVqIV%27LY6L<98_`t>nX{HcJN?Lo60JQBbx5gy*oXcHnBZR}0J!lpt>ZsWFJXv| zBnP!CrG1|g-SLWhBJlit0pLxAZT+UwwQ?a~rv7_y=P&LU^jCC{(>o1sQxkfCdGM31 zzO`>te2-L6?VUOkF1g*Su{?5_bT)j80!l`U2@?Zh37pLqYar;OwEOo&w10Qk98F6g zAD;@*OM5BGUk6ql1`N{3_hep(fe$~Zv3w8na==@)BwdJ2- zHSnHA{@(;C{vBju^0`K7`cAA~^#u%TYlVYLje9L%SowiwTS&SKG+XPMr!UD9MQ?C9 z;$qLr`kLN&LYAc&mq;<%w(Vl(RXspuLNOq%p=6mH7cL>zmTa(5@E8}iEzEzGmIJ|a zL+wj8fN*n1Mt0N;5=th9DmV<4Mgi%gf!m%Vaq9%slYT2tir?Vmvzfx(-glkU$4xdp z%+>ll4^{0gtXb1l@H(etz5Pk)*uwl;94+k&8#eaeBxX9VgMR$%{~k^K)B;lOX0Nn* z>C!j+S+A8c-**Zv)Gjnitm<6qKRa6VTzSvlJybK<4606=w6W><7D`a1 zT|cVNf|x$GCZ3(yWadrBn}j*s5C{r_?Ud{bzsXjm*c~{s3fDm%ivNvis^FZrcK!HR za;hT{ZPvH;nV^8bpn3?PhQDYS;HA(lFOlK<(5eg|K=_xRYkQ~u#@#6KFVx{3)lctgCYJwFpswuLe9E6S!mJaFOTKqE*S8`A9&4c~*Qvt$}j%bzclXVn2| zTG3jJ@2UklX{`26mLDHb`$3LF#LWa2WU3K|$f~N; zAY4aP9mhyw0W^{Ogr|pxpT!FCAv(`FQe!mH?R3#Jly&xl_>#xt&s_{*aRXCLAQg>( z1gE}jRvqokmEXB(%~Z{~rt$h+<~(6J*Bi24)^ugibY=%GJ6g!_tIsNvNuVA`aWczY zC;g7))V;4Sp{9pGE){$0Xzs^bUY=+|bH?AzL9n^Np#a<`IoufN%xh%&nQ%dF3l z#G8JMv{Lak^z^)22T+e7Jxl6rrC92%`8Bte;m56RmEs9Ej};&DFsC%z0m*h) zO#_lG9Ra#ZPvaOPwzb^O6+g})NwG+GBWx}|##ik6Y~<<+1os~e9u^sU<~OUIYV0{m z4~5Uxb!^!TNC=enB~?ATvq{X8UT6QOqX3-29T3l3rw7DkjT=Z?iGD16r7SM9w0i-d z!nIa<5L*MZ(Is;RWeLDG6@qZ5Jo@UBq42ooo}Rz?D*t(MMff;#>o#H}rpWPzVTr6e^{_6n)=aN>fY0#9^v899EhUU-XWrgZ!D!a z9H`NlIdD*J|4Sw91c{BhmhW0&b>|se)Coe~y&<*Kz0%@xE5JH$8Oe$_S3`z~N)2kc z&JS`cI*gaVXY^^+(6!&OsQ?Nf&2GV3Q3100o@87Aee%?58`A z9kcqHJ#B>JsN?zvbUWw}fp|k(c*6z&WP(Pon#R)X?luuPPobfeNY#hmCHzj>xtLYH zHpElUMGR(*R10sL``1hmLH-E~vbwx-h33<#=Dw{VgNUJ;C&UNft98!&+y4}sL)DfV zfAS7M>fxWcK{E8eWo?fCS1zLz|IGVQuhOIAE#IGh*h2e18}xh$=P^W_8(m_nADhWL z)L049ol9J`wI>EDaH_5;S8pauuKR)T)No4_T|`@YOaNIdlG5Ch0Y8F zLc-doN_tt`6i`+RX!(NhEz04GtRf;A2}Z4$Inh0YMXd zTk;iBLc4RK^6!wC$2zRVYajZf$35jE#%5q=*d(r+c)iJkkj)ny0olffhiw9`ivc6t zzPjl<+t)O|F5F{?u+sV;OzQ@bpH0T`^op!5o9EivW^j{{YU)vMX6Z#+s%rmA1IKjq zX8+5zr~HmD(bg}xpAoWJ1#!DQ$#N@?l+wje8SR4RJUf$VFl$nVbIP0Bf#6&VY779q zMGsjp#7CrKfXo~DV&RVG*n8OhpDUdFzqmXDE^W~%?mm6>+zs{b#Bb4Z^{1T^JrrwZ znOrc-AF5~1?L*NcEF)v+qTbr@^g1x?e2|j z?91Gj7XRXJtlzk`1L&PCz~*@Q$tStzLi%?1uVp9&R~(|wahL74l~lg5d9Qx<_UE^E zdkk!iY&Dqsfiq;Oz@eM^KhuDlFo5IP$UNY&(@5Oc8?YCP{gj6<(@x&%1N(1V9r^yB z;TE{9|H(X=i#_)6r3NcrcHlaWix0r-ILblmIF108lkoK0iGv zojBRzoVCn@QnsD;$WdZ$WBcH=tWe4>BhTWBnYXoTR_#3T(0$`^m4BiaAzKu{`xMwE 
zSJqf=+%dzp^~$uYx-U8VvcA2X#PR;hyuGXT$n!jmoN^~?k|%IweL+j|7nM16A&j>o zB5uFDRtT8|FI)Gha1wZEWgeN)BN9M87jQka@qVz@JN$ZH{@UkTB9`9X;_*hhm3fXP zaMkzX!%;c6+*)&!3odTiml_#G>Ailq zcJ`a!atm!D&b>Efnfc|I+2j3d#rE^&T&}-u%K7x1*-sDU^xB+!B;^dzUY2@nXi5RYU zGHH;5)iN=}0)jJRZ7URQMZp#Vosn=FvEqRmG)9Vu1Q100ZraY?`@639kN185`@VhW zx{w9ru-0!qhx@*t$43_*{jyZNCMSI>A<@xK6GuXb3t>_;;s!^0_;Q8gh2MS!UrYWt zJ_*N5DLSFx`@+|9%Ko?SEpYtAAIBYVEcoL&G&uH$*D~RIH+%)aF$=z5g)ev7@9(ql zoP)b*#1+2ZgRk_xcE7zR4L;!Sc7(hdu*QF#-*W+e34t&8y&UN02haI`KfK_XbokZ3 zzX!b_^n3Y@SJvhD1^D^B9N_2g7wjht_It_8&p+5NDA@lcvU*2GZf1IFT9y|)BLL2w zRPO%$O4h;Y9QxyU0~|l~@$YB3@c#Tij<+m){5}SOlOuis{(kg*zh6}nJm2n*;|w^a zN`HS21y@Qd{r&wvT=;$2@5dTAru}hTvGCe5`fu;4gJb(YeqRH}4EX)O#U20KvC|*N z3#aoz8H4pv_^;Q}48O}R+*bO(j~(E14K%VCUX}wFWdFzSwQ&5{ z7r&q9M{vCCi~qV?q?+;D`7D@Fe;hBI-@hMA_=MbbCgc`8;eVz7zxrLb%X9GZ-|y|h z>pAcfyan-ruch$y@1NliAOAlQNOTR%t;3E}9! z+Va9>GYI{Uu>~6L|8gvHu>Y^IhzZC4eJrAAzxRO0;rFyZjy)HeK0N;2!~XsK(ZxrD zsA5Fc=I+XSYSpPEO8`xh`>W^LYG;7_;(pk$yotK zrH388U;euFv4ozxK7pGvcRM*hzKrd$e8oz>!1w8(7hZfxC<?rfhj{Yie5hHbqwU&RsdVO7(|(KHB^7C&gvu75hIsaPafWFRE*5kJQy4{p&X; z8XCWCI(h25b3gpp)^6i>5%G+7 zCtmvHb@#R3Zw}9%EhY!kn)cSDOw=Fw>XiE7`%K1{LW85^NV!_F=&bm!D{spsi-Rgi z#ll@KZu;_-+@s$w zCbWR`7xxuZQ13ou_D%jR(z@c~ z6}r^(?Q1862In^h>l(B#y_W9rRf}dgL-E!DrScojIntFWY$IFWNoRNUd+CE;Th=?p zIXH1Y&OW!H&vfUmYwhcQ3h^PjDTCwopwVLu)MWk26F+y7<>c72?*_X#7qz>_klv=I z*S)grCLi1R#B+aN=It>eS^l`PAceE3bf5pg+(hD%^gnrdkmKRciv_eR+K)fn{^_#Z zUt*Zse(rgt>4~kY8f73*U3Y7ICM4+>)}EP@V{ZrV{Cvq1uP?n8(RWq#{FnQDI@0$4 z>tySEqtEiJ=u-(R;`POhQK)gRO>6$=e6GtW9!R6X}l=y_~S?t6dU{I#?!QfM={eE(k+9^l7D``M?QA#g{o(B!u#VKYU@Kj5>6izR`2lRo^F~} zYSZXPiru_Fdzy5ysFDfO#Ia}A2LAY==d+@Be(>4yh|Jd0U%nRmWM8OXe7^S|m89W2cjL;$*B8yLi1N}g_c!g>{_GpuV5&HCNwPhqYn>b$t8z#@F`0Ty=0tRKMoL3DuhpUi#*oAnNto9behsjm-Je zX@5)Ajkv2ne?xomXn3yN=;7_45=6fnWEBYf**?Sf7zH7df1f!v?dP_e646Vx*Y(ws z)Z|Nx&tCPi^o7m*eQk&A+&MpbK?C(kHdA_)&<~wyJdo>f{_IQflI8NhmJ!ZSDx)dp zUw!w;Ynw8n%vCYuTHk>kOqO}zcqokE?+(U{bQ6@%GisHN966vI-|#XPi|PI_=r9+3~yCIEq&XtFYHZ+9m%nw^A$&W zhJSAK=}76|GLDfqKKog6>Eu=)y~Xf|OuqK^LG4>_e^GzgarmKt_SK)Cy*<8P^UGTo z-Al$Ty(bb)SxcYY@{yHjL~^5U)ylbyC>|MA#sCG;o1-&ABw z0fh%Ep6~G~%n=OdKl4SeasSGp?+0^R7Y*=fsuza7zBhJ3@`#A+KHC200q=>M$Ifvs zdyl^n3n$SZ(nTr zHqdk>bjo~sIOUUnXD+{R|AzN}8m!Sc&_fO?X69K`@|$n`L|+m5!}t2V zBi4S`t`j?sZTQorpK5kX-=8h{!Gm`ndeC#3X*Wu@be;QgiQ?x7y61t!n_qT6P96Ed zWw>tLZ(YgqLQv zWc}3ln@rDCIM>v9tXEG?KO!Tq|Lf4m%bx@r+*32%udyc$%(8#!uiyB*=p23Jn~%sp zLe1e?`;C36nk#<|3VU;%O3|mSpwiBJfA&V!pSs?7=CX4Ag>&uKFMAIbj7B8|KDN0f z+dOA8IN9rVHVN8F{?W3oOeSg%H^o%E^Wqd0+SnB<>fnxBC9a=I%K|z!&@Qs=9_Kgp z*7m*TV6pQXwA~8uA8F5>HaG`-RQ%dI#jD2~4;=U4JXA#83%l^Vn-BSuBU72(GE#J& zY`)x)F#6uLGtX~y$qX6d>91@%@;Bd^9X?(mOKwDXxob1SR}EZ(#>B08^2Ni8K0WjE z@caiC9vhjz#hTe7`Pn?ivCI}eO>Q(kcJ)SF$>;ewUC%rn*C}v$Oe-|2uc|!V1HO%v z_vxQ`Bev|_fN0L1o%>?#Z1Y)E>B=kH5W>{5hxUL}#L zr=K}6W8QHx(meWl%0VZOsXd-!OyX$@qN?(;nyCJWl|#mVkS(5ZPnVv3=kxuZH#(n~ z8TO1HDJI9CKDy`1+x98vj_*n;iTUwg&)a>{xATXt9G|QyV}!&b;OZLYV_SZBK5TQK z$vj$Vnf#BMRsJFB7v)B_&y<0wyuEJg3to53&&sQ6Q!lsg`V&`iNfmase=g*D-Af%m#6RAXS&|>AEi5ePJAXj{A%|nZo8viN%DW~ zy$-t%hWA}|-dS6F|K{!a^!FzRpIMZ<^RXiDw_H5G|6$M8lAAO4BE-?_Th?<_+!sH6 zY&ck~OZ>Z4{=vSdF8@=#$fqdsCY?@ndbRcSdh!LtVGIA<$XdIK-FE$NM;qUfU3iCk zV)*U9|HmoiE@wx&j`p@_So5yx+56l3e%yCZ`+Y(A%+A&=j_ynUDL-{`P0JIqtIBY1 z?-$B>e=+x^%O4T2lK_OWA&GED()xWy!zAplYcv%nfuZ!f(@AkGH17j zyxkcg<(1@bb**OKEW71<&L28c|D9@Q+FQvlHa&=b_Mm_nsv(M#`vM|5gXbUL`?~s- z;&8X5w(NSwzSCcOz4l&!zl;ohc*)VSFL!$l{n5{l#vaXWSo7uvLLT7*1yBG5Pyhu` z00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG0 z1yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5 
zPyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu` z00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG0 z1yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5 zPyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu` z00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG01yBG5Pyhu`00mG0 z1yBG5P~d;Az}Ta?4Qt-qK*;}G0=#k*Kmim$0Te(16hHwKKmim$0Te(16hHwKKmim$ z0Te(16hHwKKmim$0Te(16hHwKKmim$0Te(16hHwKKmim$0Te(16hHwKKmim$0Te(1 z6hHwKKmim$0Te(16hHwKKmim$0Te(16hHwKKmim$0Te(16hHwKKmim$0Te(16hHwK zKmim$0Te(16hHwKKmim$0Te(16hHwKKmim$0Te(16hHwKKmim$0Te(16hHwKKmim$ z0Te(16hHwKKmim$0Te(16hHwKKmim$0Te(16hHwKKmim$0Te(16hHwKKmim$0Te(1 z6hHwKKmim$0Te(16hHwKKmim$0Te(16hHwKKmim$0Te(16hHwKKmim$0Te(16hHwK zKmim$0Te(16hHwKKmim$0Te(16hHwKKmim$0Te(16hHwKKmim$0Te(16hHwKKmim$ z0Te(16hHwKKmim$0Te(16hHwKKmim$0Te(16hHwKKmim$fk$JH<~FQ(a|0o0fdVLi z0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2 zD1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVe zfC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch z0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2 zD1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVe zfC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch z0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2 zD1ZVefC4Ch0x0l5SYT`W9(JSm#^BgTDW@4|MBC| zkJJ~0Q1;Ys5Aw%@qS5W_9T<*I&Wk8Q|NU?9@9%&8Z+~1uXcV1Jv!mPF+u6a}H zotypAC;cNF+~eP6JSArZ6qO!!^nUs4*2fZh?)n67&fM+f{P;4q$MO{``2ydkgI;*? zC7~#I^($*4rBTr_>o>gi`WtV)m6)>Sy{)Nf>Dv@p**kaTqLw&~=l@6P@3V_Uniqw}ZUzW#q*7`XWJrC+X_hi}}p z+_K&tzc*osBZ4z=XSxZO-g*e%vSMmlfj+XZ_nr|L>ji?*FTk{?Bv%&r^QXLfjl3h$GyICkf4s zqPtO#E)f?x1wYW;NH`hNM*Ksef0*vycZ~RYnj-_`-+S&McD!q~UIu&1;n7Ita5pVe zS;<&uE+%0WJI@oY_^5oKj$2PTnK|*32C`ddiV>fXFj9Jp%J0X~%zPHrIXWU)Y+c*! 
zOajNGvI=5IRfQyMDkel+&lTUIG9x@YdQPc3L#%8c%G&1g8SCKU9RTb;K3~Bs%eu{+gRMD8V zT|h;OCP$+sjMb&mq)HMv7_@U#!YF9h)$JBXiVSvKQH@7eVIoPsayD3|Qsvi^%Ml$y zGbdr#3>VG!O1WBMtR<^FBch8*@_G$tA8t0jz!!+AX> z(cCR^o56DZQb1aL6!&}X2uLSgeN8L~k1t6iubGb;57(Icor!~DFi2gONY?KYAC-mE zB~AlnYsk37S?M}nBle4x^^;Ar<5GT=wT*B?5=vW!<(E|MRi}m3M_J@_m8q!M(ZbhE z&URDfTBnY&Xqzd9_!pTHH|Ae7*tJx9SSyL2YmCTeIY~;XpL@{LCmQOer5ukn>UwM} zvdb!MY9_v>Chjexf&EZGImCuKQ|2ZXIr~erNv19)uXPUXyjjlvHNT#c>w^<^T6A%= ziSKou$Ah-)rsjZU$9yy0sveOzJ5G8%aF$)EC)4K{BQwq<=t5Zdz;Z`XALK>k}#6Y#bT+F1lnd3N+(i^$?C~Y!ri)2c_GFL7pp2s-mAd)q8Q?s*!TTh0kS@d>4d%_pC2lw793?}Q8%ac!u zMkFM`FM3EFJs3V5M|=2dwy9Su_1fp!Ozac`$DSM+lPsENeQqspBc8`ER20tMBJ*xh z!Py2Px2}bosp04=zEHVr${vdzlQ4t^05Wd39jd34@$KR?!d3Qqewjw*(qoealy6kK zXjI}fx`uGHHu;Ez(cimI$`~movXIJT{|hSjAZ7(|D~=)Z&=i~Ph|R!AIV`q@WE6_C6 zKUlW=k3|L3p!@e}va8P?-$vqU^s4D<&6L5;e5q)zo{~ATQlN#pXb)xN^;oTA5=Vzv zPS2n&j?xAe`vt;D40;%7(nY%LAaW>D!ciua%r_G*!!AL7zneD6v+;y;iwT=s&+&vd z$yp*{uw1%nsjOd_nXxfMHaA%_mus-6t8e>;FBMR-!eUK^ToXsx+6Csj2D>h)u=dwX z#U&L9F$)dD*P*4=%%xG%f|*}^g*JozbohSmS$!HY`e?81;p}9y2q*Qb_M(Aa&pWa^ zXXHLK#3TM!weuC^!(Mag$$hRLz)5(j;;~GWLU@c*H zTGyJOkqbQMzay*?naw4Rrpbe=DJF43ev$OsG&0P*A9N_32Y0sqsRMMn)*tR+NQA}5 z%t}$4!YasK_Void=yhQYWXl%aw5`^xVZs@FJ{i_t@;+UAUFFWZSgpx2Hj-7GEkmv` z5=N{=vZx(yv)99`;#-8H-ZK>ECku%q4pNRndUpFk1D%uFbg%YZW2};_8P63OqmM zhwmX|U#3`@P*t}YhRts0IJt>ECUKDK5|)ZuU=9$`StsY3Z&9wc+E5r&*<$0@)#>eG z80P#tJ=(=b3xDPUO$Ll=2KzgU;q>TQv3zYmGeB+(BIKnMZGIYmpqVi0LaUN`w`4kc zTP4m?;qBm__TV^LmMy$jTh~Z@hf+!-{f!>Hg%3(94tFNZz&37kJteG#wl2J-h}H%` zC$Qd+*XBaUaBteS5k}~lytsfUudOPkz@ufTn@A0FY38uqDtC^%Y_uKbqS30D@Z}`X zxJ?Elp{8&Tr(mR&j_z`jl#!P8B%QO9yE_Hw{Lw_zM*r z7=w5faa4a#ugdj!A|tth#OquNvdF31PC@#I;(Edx>d~L6Ab(RaH*mBMk5WSu5=LUu z-Qf8wvTJnC>NH!hfw)hO3a|F0Y%M*8V(Q-M27& z@P*YdU!MJ8z*fWiNJY(AqgsughX_~bH6OIobVI`UK5;hNajHRHW1X0@j0nr1&}4=IZMZwaUj@h7uAC7bhw z20EkGs|_Y3&T{sRZz%dz6Dc71ub&5|xz%yPPKfRZ1RV~RL zO3=7>g~9JQUxsAYnhG~=BTk(|lQokD`%b>t3g^SStZg`5et)OI?zXGlO?Atd`jW~e zTkIb(H0cAu)M(o(`fbsq!OqgBuQ+WPkvK{@>dD%!kVoWG;=xem)Qn!Un~*q1nLMU~ zvWdeD9UhF(M zefSgaT`-57GNkqo#nFbQYS^YA;xfeua^Y;LI;Ub@?Ps21w^&%j^~WL^|FS+fj-F$i z`;=|IcG5sLWpgYSv>$3LR)~I0ESQ4po+@v;XRz;RWm}pWeY+UzM_?-Nencj3-HPB< zkVD_~R-L9GZgoGC*ZUxj(&yGd^rDqt)tK3)UKM@t7(e9H&`s5{vvp=^fjzO)3R!aN zCDoFwTJ}g&C1EZWgl;N0qFUnOW>f}3Na`vJ>AlPF@cfi;lv~R$shHujHu^OdX=a!s zY-YZ$nSMC+(OyFvmxVp;i00 z$JVY)Bj(9k>#yQ^s$nd}Dsj?n*{p4WJ9p)1Al#ba^umqpD)(Dyre;E>JZo1y2M(N} zc!_i7!f78$BZm1y^~5Oe-7PYZSw&2Qb6_QjZ9gDnceGlB2D>47v3M^`THRSABMWg< z-!YFi^1LFtD>IFX=AF|XoF0)lB@o~a+2(GA zrWlL{1O1HS)b(=s;8h;s=IFV1lyJkvU5h>U1`+!GUgnH}xQq{6)t*z)*X~pFLvVAj z!!X1Qwj6JSa9FdssB{#fJYmZSS{c(dbwt z6NcE)E&)WcoDySE#EQT0>H32PJAP=d-U+(v$!FgjAL(pQC!JkP0VOgHobin^-tVS5 zr%knXO9HK}EV4u~J6Zj8=Lp2wB|jb57X^X+Uq8!!&E(_qpf}~qVAn+AXc@Vrncn>B zLJHIA*K2epqkW6$+Ve&%oJ4vc|xfZk-H*-fQ7iC^~;#NMNTp2`L81 zfJyaTXvQYKXUzJ(g^Bs(@V(sXuMA|@ZN`^^&aT3z34_z^zz8DMa+Bx7ZO~^IwTkAm zQu`oBSXp74hJ>I}EFi-r@uQL8$Fm(;Lj)be1r;exO5)^w*DzixVbt5I2l~5d?xXh; zY!D5k)P>8^{p1p+MPy2D&{xGKlGwYo=3|5r{)@!AFL&%b#I7~mgAvVG*@8hGm9iW$tJ$#3bhP zzFh)}vAS0amuCv=-!%yX@5+kVaS6kGp6?9}gnsqFaqg@IyiE`-`31%}L?$LRIb2Et3bl?maZ zYw|;ZiFZ1Vy1ZHYxm7}0Zl{Sv20A;mM>{hI0sU1q6kiyj*>h>8o%d_N(2KUo@gHrrrtZVRWw(J}?%aeV_RTqen})r=ZphX15X(Cz>?eN_6}Y!@qwn1XptWPy6d z07UzWeayg^x+9VKpBU&q!!RV(@*^b9uE}$o66B?&RY_0?(AXHY^!l+P3AvqD>3a5p zirH{l1H;u8UarGm%AqEwD>Wr1`}q|@$x6c%8K5|qnb&L9nCq0pzf3uwr7MLn|Mp@I zbeo-HB}!MigNmu{)Ne`n;K%MP{s)4;F^m@4+q#%LD;>R3kF!WdWGurPZG;M)h^B2I*|QIQ0~d01GRgA% zPH44C9D{o`Tef$FKzU1_)5dpDQS-Ugp$ZA(`0;JY)Iv1gU9cf5yq0*3l-!63a9AjS z#HEp2BGyi8tHItB_K5tX8FDjTfET{XDF^SJH6u`Q;2w<)|CI0xW0D=&zlH}BvB0$O 
zbAhcnWPSrl9jJ7DISPg{eo0bRb(08AiJNAU`4l_<@D{b?s5QX+*)kY36cuI$Nm5*-I!JS ze#+sSDyDynSlO#E!mKDXDc751F#ox>A2xX2BL|K*jQ-q{tpFV zR|JG@{GbO)$-x}RHFi_pY-<}IENvv!7ENh8ytz4Bhw zGEIuGM!ay3$6`dcnfdh;Q@Sl7|LcY3;PW89A3~@R=s(H*hrSz^*mIQ-Q0OeW?${>C zUH!G@psHg;;Jr&6(wjO5eM@Aq8T1uUi>nXH7O}{oX5Z+}(U~|}f=UQPqG9t$Bn+2b z0#iMt3Aw}Z{V?B6Rzoc0>bb|)2t{tmKx|R$gO@l zP_-SRSO&c~b^_{ScMFd(jt7y{zDa|xi4M5kvlG#fx%FompgbX?77G1EB}&;56{|2f zwpYcXYl?+XI}OfoO>`C8L&Br2nKsI_m`gI>!BJ6O$;!|2c5%d>exS;$f; z-n$WI=7h|G3iUz-EBv5=Lx(z&7hO}O3~_O})#795fltyHB~VG?#D5lc7SgdFho>gnI7C_aMG2=T~TiCr!MHT?V4JU*(m`hF;xi3O{70 z$`nC>Jjj`?4B^Dl3fMOflpHqLSvzV)qOMnAfETYJ6MfKkO7*@%b~{usoCEa-tFKih zlGS5tRZxa^;go7TV`Ng~HVBGEioxEZ=@T$Gw_gp`E?z0?>tg0ByQ$fE)>ak0h0P65 zj4f1>q>23<=Q%c*XdL7h=Q(lAV91aTF=Xix?TXSi!fVNU&|~5hKq{cQ8$DK9xlS*a zIQS3j^UY9Zx0s;f&14jAa~#$}6>+R;yt*j~M#Hnpcuf`r8_M|&r^HeR6)o3VQp@m} zvcRmA%B5IYzfC@zgiXNOUTzZL|Oz zNK?^7!r`4kA1W>P48*>*D`Lu{)<<6_-b1_|rrBTtl9stxSl*D;0xJaEQ$eYBjS$J* zg~H`rJ!$1IZE;k&RV%P?qej~xAQp{CWrtwhr#-JbZ#i*gOxAb}LxRT8+Y5LH4fJn= zdGod{`dB!Bjq+xNZLI1DoK^mwmHvZ4L^*!xkUk*#JXtC14F5#(?VG8uA=x$kIsImS(7gI8@vbjX_pDtcqo& z>cNO#pje2|*qybVi~$I)+y^+$uIt+u6T0YTF^L~5epzu5%88Sl9(}U|4279WbM4B= zJ31%;dN(z%jN(dzNOM>2YyveKM*`JaxafY@ZQ?-5BHd(=`uJf8Cif|>9?*>!l}em0 zY3@NW=9+cweF)y?(k6X0-PCB+g^x_@hT}*yUnjRl--m%YtRXLsDv{}J9DSV-O5ZtR z$LfXki`@`iM;DXu;Kk(mkeekh$0i?9QGMmMYQ_#RRC|IIYexql8r5Cb3=elhsXJj@ z;wZ3$#?f?Tx#GO;nxpp79R{Mlc0mgzgRGlHGZ0XfoI4apl;3FmFd4$-+V$s z)pX@`)5g{1C9%rDNf^qwnE@FBp-;*T6vJJhD4yL8i=j-{T0UI-Y+UdxEcj$!hsjYG z_F+#4bmnV8%!?xu>egZpsO@a}`c~jf{2j z{E}qI-@G{LL;d;|PB*1z+ONtW5XVfh4)Q(C5_^G3At63=O?-Ko(=ZI!3+dbU-L7Pp z!EShRHeu+i(H2v38nJb*)MiGD7!bAP-_f}$*X2O~DvqYZ&|O`0znc`O?^SaJRHiuG z`6W(DC>?5>OtDNb2HkPp(C~akbHpRE9Kr>4Rju9bfWT5nXH2{!pXFq8{8ezGVIhpvdO5g#jc~Xwqo2H_-GB^pl zpdY`#wL)i6DI~NzxBc=m1B(MPG6j@VS3vYstqF3s!28{5J*Fq zj!=bH>L5_v<82vOV&P7?4}O`}3bCXA{hq$YdWqH6-FO>Z+ zsXHMp2q-+paVol4=>R?s!lNw{>=>ftW+( z5|1ueG`ncDv)xqd=U_YvIs|tf7SBWGE0hY!BGZ7_dUBzVG^#7u0F$>YJ@&AXaTv<$ zzS6U>@|G#@;93>k&?d!4Lnff0sL+g?SkQ3iZUGN5CEKzfXiK(d)WHx3@k}#$p5YOm z__ALb5vM^McrtvCD5|4NG6H4Kpx)t!w~6e^#M;hI@JNBo1MUeu$eH+GJ;1M;fD80j zLk%tU-QBdpJs|@y4+^Njv6-I1Ix`e^VFyGl5twz5?j&Uh2z8L- zVypg^N%8uLi)Os9`ANoc4$RZ#5*`$~L}t69=#p))Wng2X>>^BAS8j1XCC^9u$j{T8 z*1_m1OEYqyJJU2-Ef7tQRyvtQ;h#Y5*;d^IbtPBG4i=R;FqDu%4^NKtmN$~;9fN3j z&LluFHW1`*QWD11POGkx>YRno+-be=nToPi_b+_(y4l{La%hV(zu9FKVPqL5W?`0h>WYmC(YcI6WH4~Awu3!12`fN#n$+UB(=I6x z#k0ltA!k{V9|0@a5ux&`n83>^cMbGUtF;lsmBAJ?%K=0b|F*Yq1g1cpW5C z0n(R)<*UGi3!0L~(LBWsv2f)(Xh4{*daYlpKW}NGFP-*4kg5_1Uu-3br$fCkO zH!Q4wl#!OXbpMVHSOgnw7r=feSmBsvRs909s@bk8Q6^~*hWdg}lhAUh92R!$R2k{v zF(iEN;7CknG}J|(46jlKj$T!f3}G87g&1_adwzrZXuEu2Rl||XXy(W3V4^KdbWpm) zQDySc-UA+9F7B{i7@IU%?ZJiQ$5$3L1@|nt46Kf~G3A-cAeJrAHP-ssY>_c5ruN8~NARI2&r**am zLjQu?3r4l_0rB?E938J=8`4@qb|behrL z5~zAlw6upi%RaXpcTh3SK?WGFQ^dKm@v4l#k|c;UTg(I2wQ7CgezI7j%QAS%q9Loa z7A_~jasJQ;Q(eT0QYrg3^!t?Y+7WHl9>VSlKesys)^Uzjb5a|j{4;x9#VR?@=~0&~ zWDM??_Q83<0Ff!*mqrO`6*kB{hO*79zeGWzO3mj~H`$=TQ5@gO6^%ibkXj6hwF1f_ z8TJBipCwVhLKT3qQRxbGhbSLrKv>106R<8#?8dvKjl7Fy7IACod?=tFhXCkn!zS1= z$8s%ai}*(boFx!t`L$Y&+Z;RYhy^6<>~Mm4muz8h%!nEtl?{&%=s)bH=d`EGi_Q}- z4`^!!P6k}a*h9_C~lYk5%cOF7(neUDKN@8+gYFG!lu?Si&9B#)H;3Y!PQ zN0*aM8&GW zc{lf;@)%j*aq_AkrA)x6=`A+04TNg38`c&y{5^0$3o9q<=96ta zrFNn5cnaFZr-C-=<@anbvV@&!IM~UDZ2%uF=AMH1!nHpyPF{3Pn*+st+25`mtzF;5 zr`N59V2CY*`uzZ;#J3c^8qO94G<_m%6igan?c=XeJRLNkcFAmfJ5)%hNa=-=dN1BQ zEbq53Ug;p7fEe3&q@be4;V=k8Qjwc4ccaD zW}M|fV$t6-=m3M)m^2=iR~Su+2@V3{&Cqm9`JRbHcD0{(A6o40G!KTi*Mm6PbWMw; z3Cft(5lwul8WOxk8ryJh9QDvQ&j#s|ylO`+ulWSr^Xcmc&ccWJ=H7u_K1G!Y4(6u1 z_aQ(58 
zmb$*s9)dZT@oq;=yFFoy#7JQ|lAAMXm3VNWG?>VMvG_{b!q%~{ijzi>!C`jdJUcXH z!oc9Eh94xPmaK*>)-1ZIy$oZDrBbG-BU?T!uJPR{$HsLW6!y1UZTw;Yl%FOqk zs2S}%Pk0Y~b8ovQ!*nh*Zv3gdPId;Kb2~k;V;ALkukG5J-7towPn|? z9pkSXUf4jQ)cY!oqB-b@gX+Eu<(hkPxSO`gYVUDVQaAN$ns>Jr7P3qX*6A|1XnN_u zWVIJ;oLQ2^9=&?tOR0y!e)B(#x&Kh!EazlG$fSb;cR*CD8AdVUZVGZU9V3)1Lm^G+ zD<)1h%T-M_)K~l`tBGLdJMC_$o^|t@<8G#Gx>u&CNu0K807!L;YZdC?{=?DB=RBWdy}11}F|X|g*aT$^Uzsf?qg(7*J}gKEKf{zgc{ z@+QZkt)@gmY+muE;}CFZ}>6*F?vJ4(rAT2yF4Hx)A~#M_6ex)y|cYl#xNU{A${Kv zc_~U^PsG@TqLTHnlUE`2i6ezBJ$?J2;CZ$DTey}424}dpKnSa~V*{)#*hWP(OM??i z)|uf$ymj=-z|K(^)4LT?FQ}He*TR_Yxu0i*fVaMO=%v*f7{6@Qu;gwpY-6)ULNB>C zjqp!a8(Lr=pcb|ZB&7rq_1w6WJ#8S0%A_TM#YEK6Dyo^Cf?564icdtQ8&Sc#BP9@> zj&v8Wl&~12vsH6p{oHsa4R&Wd^pAqyO@5(THa;2^yAeL9quu`+tedG-3>dQLnqAd; zi0lrE{S;Ct?|DGuhXyYW>2@t>Al2R)a}cy6Wp>c#W^Nm?%MRbaf`r^{x1t#SOYG|qDvOFc^G%Vh_O8~2cq{~u(C1d3o+Je zDJ;q*A2eFvd<$50bhfP%R`#D{^lIngi1Kb4$87`#7kTOZ-r>SsXV-NT<`S()oE|a_ zyQh4L^i^G&akU0MVpGwaRX(7G()?aN6oE=&BP2|Yepy|R&H@GhQBAj40Xwgj8I184 zH1~2^yCOqi*&U*Z|q z($Ea?))A5M*z=C0ZgyO3@FqObt}!G5Q@S+?(Gor1uKeImncaH z?8u@bU>lr)WTb^cxImeHk-#{3p8E})O0s|7WcB^b;4JuW0MYsu9gMD}>TYdFG1UKf z735@RsA8ZP25`z$(0|aXhx_fd-SemNqQMCRY+w;}!nSwmc3X=68YEz`p0&INQZqiA zkfH~$ox{CmDAdxj%*WX0Vd+=8jkt!ES0(!S!z^Gj$H3qoBEN7Th%EIV)4$4rEjo7e zAh#0Ow*=#m2e@iHV++!Sko|ELu!wPmPKR|cS)HxcBxJ)S!)Bn{)OD5nw*qLRA@ci` z!BenM)jyj$&`7pu_m0C>GT1G4R9OY(D%g)k*TA+l$|0=5wIz-=8P;&fm?E)fTXpb* z2zDDYDcHM1D=&dPF0d#KJ833n6ZAqeY+np_c7?Ku+?bpOQ(R6fq!4B{8(Pk;vB65& zkFcV(D#l+ww-cI*I4{26X8sap{o8p3tO6KyM=zCZf(_LE_lA{#U{sR-hokooYx2zV zh3^pL5sHb{oYTZ6ffmMu&a4{Ynn^+ep&kjKCV<^Da~v;9i*G^SjwJ*T!T`Zc>nv9q zz|gg2Vu%|edUlcaEsolXLOT%XE(8NBj#{M#4KX4h0ThA0pJ)Eqz2>S4$@ARz{r!GF zziwH4!R!#A8iq|+gkl}nE~pl}F+F$itMNv}3m!lzvc7Pg<@qp<^#$iSVD;P5n6X~W zAq#DjpbGMo>H0Lm*C=yM9;x4)Q)eqx0tw`X3lAGgV>*cF?Zd?q&rkL)!o)0fE|5_K zu>WRVJ=vk$#~cNAr*F*6KU4j62I=1knT|)ZDqensaXeUe`&1`3*hZ#Z;w-Bh~WbU8RdOXU@SmndDn^X_K1%9>Ynh>U;#@HCGS!p;Dd}dl?Tbv2(GJH01 zRC&(V0Fh!DpV^XJEhd5cm_#u2@oAxH%5yYH0&wT)eLMAWx@^|$HzWo znk@Q^bOLQXq<#DvPCCwi@!A+Mnb+Q7|L5Hi#%L$;F31ErRpfp9YQSLf8W~2F zQv4(l#1KE!o6>BeaE1x4cO=OC&)`)^2==RQHn!HRBMH4J*B&%Zu&$s^&6x|@&jd4% ztC@3vTYXwx!e4)Bm(8l8II$WWwK<(}pd`$~NcScBBsGn5S!%WMMT0DjDSEZg^L!uMgVNjen+tb;oRG@ql87Mc991$c#E;DOb$1HHH9Pd7{|#*C@gfhF2r z@G}`#txvg{jha{_SjE71r<-QBOOp(sbY9eSwPN%A(TkQk*v6x;1GV*8ldm zb?GcA50AE&zu^2Us9;82@Wo=APXHNkrN5gSXU70o%zBc8uF{rYHYC7dc*js8o4)rK z9Af?HML4|z5NA5)@x$^$t!M~h=U>3$=fluOK|`b6Uk~fx9dP%oX1nbJBWKRdiH7Kz zScZ3)6G2>&;Ee9&`|IznmKpCQ9^8p)_gL8wrqSSLF8PAk+*@rWi zVdFO~IAgYzC?yd7N*7!_U&9aZY*6)S!X{15~5VBX!y~0s9oi*1hmr44Z(*NGE-%S(~|{)L%;h zXvKjypCl-up`dNK+<8d^cz(m>DSdrn;00^}%+Qj{&E=zoR6L_TTuor5^~-^S`LFmo z1oJPzH*0h^KkY6_M`su3 ztQ{O-9B1dKxjuT05Pt4F9CGXB`MI;K^xEYRInhX@aW^g3D@?aOFhC^%UyA8f*!nU2IrHPL zSg4(tqp~*Ur6c5@nZ+LFHCYnt^|-TD^&~=>`qr*i3=@_YF7mI8%C#=C$s!Jdza8`m zlnJSC5v8ZomGD^qGkFZ|p?|kXp|2EK8jR|%a)t4kcPNGF#^ zn1+&sO3*1JCM)%X3^0ATVaaKGbq99CwZpo`bQOfQzrhNbj2?l*B;*hRe1!oGGjEw2 zqFDF``M^F#q|U2Ubk94K*Q0 zp(Uklgfa5S$|2>!-DI0|iSs$S{uei}C%Jy$uh0t?TB?_Gq32{}diJNoRJIgU*GvG~ z0r>3AhXU!9VcD`5I!-d3xdK6xUp*d%jwGOpOBo~Iev!_I6*`rN*;`?R5G}ZR`+w*A z5@_UNQrmnGO~FLzbVh}P31~H&R5+r@!Obh@9E5q2%F=?%r#|#9O+=BsAvoW#zl{K=c09|(fiQ|q32Cu=JgFKhIMCX`T}d7x z$^QA7w0`uil0GqLX?|;PNB}b?V^Jw~pIb$DL0xh9&dhS$N@>v~cLn;#Vb9UmtXk zns+vt4}q$%gucK_vsq6T4ex;7>Vz$46d=~!56n8i?VRE(!!U`dsF?NxtYjf@4m}3` z$Y1RNurIMKEwxP?L-Ta>bn*=qrI@rg8};|mkJ?}@SijaxYS~OnX#2e!_zY`5aimy+!z!IhYQumAC+pH$X+h_HJnZ0T!;!tZfh4l&6F>8! 
zuLfm1W`$d7;Ihlx6|sYMq2Yi33H|W+0CFolaf`^mS61qw4Fs2MNxn|kA0aylcbW&y3iwT1@ynuq)Pl3g?3)vNsdcHH}w2Y$TrXZk#d_voe!im+vue zPnmz&4Eut@^Yz^pGB?{ur61E2f}|LtxA>PfEURNSO=|aTK*dyrRvi4S3K~Sg)m$(N zEkmk|M*%7jLY=peZTk502L$u8U-)2=(5yaEo@KSwZU=*-u7^b_+YQw3HJgZ(n;K=s zN(?Rl<0hYO3ze+XgEpfSm*3)=>)97OXAP2sC)2kq; z`}VJRD+|yvV{5AbFtGTIyV4^9!Y0R2`(J_=A@dlgH$l(qLR$;xedLBz`xxXw2!=w< zzhSw`R*3E(C5$zK41)gUnpOw54D%=~Gzo4054B<`qMj2Poo7@gr=Ne#`SHt(3mzuv z>Yo!E&|%rq+p&j^*#9=1|D^ao%iT3F54bG(^oZ+~RhpdM2F7Oqm{V&PqdueEb%dlbJi6CD*^n?O1xPRZT{(fWzAvUOVtXA;IkjaGmzI+25M^2M6;ZYMH?5TScY5 z4eQyYZ~;ecblMSErS@48KnaXeJU%4?+`4G-)sVqQb6NJap4VS_fU?`yIBqAV_{=I(eZ9Bf6x=fl=zRvW_JNXi?=pXbiggBL7 z8**&p)grcGKZs`zrL?&hna}X~HIdK?)+VJyiTnMFu|GZSeQIbJY36rKi zOP?ZU9l_^mA#i*1yWA@s3d0-XB#a&tuLeHX^@0gJ&>$EbH>y=!m}Q7NuYVl|n}r4P z91yqgqF7k@@vnyKo!L9rKSu7%v~Ymmju3{OnQuaaFb*$5 z#3#MlV<7zfQCCMEf9`~Qm@^97vVv%4VXH$EOQMC_lXt1&+h6)%@A4$+ii*u;9e#45 zII0P}UeZ6oM@3V->0sba(L4ZuIT`lk-4N zlv}>6DaGp8jR&@EECEsIoJ+IPl^R-ESJQaw1h8?g9(8HT@^q9**#Zzho(&FeTfcPu zjf~~A&)IAUrb~8Kj{4aQt3B5KjkqKSEcj&SpM|@w_^VZM=oT{+0OBM6FGpwN&-DKP z|JQ5`F+`^=+YmL>DOX3E%h8EZx$feW)1f&^nx^6D zOxlI&ZCxAaZB!HY6KFHR%e&cnR#sP&(U8r!>9JS9K^UOlZOCq4^~A96d73=Ow^DNr?oKZ|HeSz$BcO}t5zRM8=y})Dv7q3pRXHh5 zq*j|pho21h>O_gql18(?9=>T$aO(9k#4BOFmyCtksQWxn6XFDSPuE>M5CbAa2zVUF zzS4cBUJJFHR@(?nGh}58f9=bBM+dBXSU|^nCfNkSrAAG>ao@2}7M=v{RSO$P3|bGq z#`+IT78a!C)KvMPSy#o`@hJop_BUh)R(e2Lz z*T}{@ZAOGoox~Hnctx8~{61yrr=GS6HO&L#Xpc*%p!;NebHs4nz`P@b-JRuuECiQ} zFBgR-qVM)M7V;MsAKeKWVZB3K*#0%$*m!=&wU3varHNSy^4*c+xd6XOZ@z1tXK z?l0js!+%ye+cO!XiJ0ZpqL+^4?ABFia9ykkB0|0M0wjI*dUbZOllQ}f^u7mw?RJ&R zpQ+A_Lw6efqT+tT(M8RoW0S9|e)yX`V4=^QZM71+RwAJTEhv;L>V$8)V742?CX}qauzoEhC^J*{NIM9=AKzTJ-sE?sjKG_<~RCu zW0rR;w+rGeK+*Ca>?!q_!v(QIp7gRZ%CG|!DWSEumG*wSWP2)R-s1jub6Vjwi+i_i zAis5H)z4OO<12Onb;Z!|Y0zdrY1xed{V@052yDjQuY%{Pyu!c}7n*&@MLaZL|9Xqq zz%|MzxBb6gtk?{;-I^%drVfYF$eHI15ww!Tri1 zw$!~VOn)ig&#+Z(eQ`fH)aM*k_`7o$9(o%V$@^e`NyNm??L2dKf=4dIKTAFdLhTO8 z3hPFbvEkd!W|h+5*;pl|GwYpK&gBDS>W0$eXFwA@u*@K*rALJQS_R9dvKy2HBH!C2o$G<_&ic`6^tAT$?63-ljd=B{L|^BjE44nXSpOs^*;S5vRktw;eOS znSO_mwC9NZ>p3mjIvz@ln(;2!r_XXhPyLd?b542~_xpwRcViW6D9Y3g(6eck7l4YN z=NA6j9C$T=cFqm7q{D(Zp6>CLB_leVmeCf0xkST~J`M#6$4ZZojZvU%@o4%Rvz70I za<*{Opj^or5ioX4qRt^AXO!tMsjh-XEn~R&S*eIQ0v}%kONrwxV5c}N z8)X1Oz)-b`T0+|4c(d zz2L3cQG~EjFtLRwgM1z=Lrbm68puRAEMrVj#zK1Y4B>J7)NK7dXHD_&XgVkceg`xU z+%(H^+q+@v`B4-=RTf&G55EJL!BMlj`w~@8hLp2-jyok!{6q1LBs%ztf}7M^vLBS& z67b#9%jmeb?uJ!*W4Fysz7^N6PF}5c&{XaZMTRrMub7$%31I-JX^{`Nl}-8G*n|Sz z68n_rub?>sL7|t_9dxoa+lPXK-WUQ1%Zye$1TWooGZ<)Wmf^R^YqfsQW-Bp{Uc)}* z!+(u{J%N2%il{mOgj?D6XTpp@a96-d*ou>Lz3lHCwivGkurQgBv@uT91f7-oSRZ!X zmc;nS0ww7lVx0<6%;}Nsh>zQx`z}kSZ|9PBSSd0K4V<%`4riP}ErX7?zLs=NJ_Ogy z9Mplv3Ih*f;Y#gh_1U7t^-)>6+9niPeXYZfTiL}KDwleR;4;nZatUr42@9Cu2Ge|` zWhj5ocFEYc#CAMY0=K(sSq^-o_myWCjx$WEZIl%#{r6*&Uz376xdvuy*l@pYAt0?Z z+=HF*OrNDCj=*T=G<=C`JJAk^?Zx60*yXmQ_J2t za{4h1gzO4Lj31Y!Kd5jcirrY62;3=FtS9scBJI;-ZTg3N5BY~8;i;2PBZ~eez7LB( zI;{dtfXsqzj~7*35Zzp2U`RFr$FcK7UfOWQ77HH1z#u47hM%XUQxEifkLOF`z`w2U zn@Qex|C!{pYHJgG`3R^~jKBAmXB;U}!WP{;epw$C z=fNv@i{8{5p7Fe}9zjQRLfM~FB@RbWyAiWdMXEuY-A|Q=lFov0< zj%1<^^N@pwL5c#6OvYwRmR}iqL?4l4%(K40APzmYKZ|~Dqj1BF$p4Xkf+IAyokUYBY#DKnano1)NP*Q~8{qO0zqaBF1XRoPxMvj-sGlW1-2A%Mm zqn_J0*aM?U{5V*uLyQd!ZO85femzHU3CBg?ihtc#F@F{Q^-oqw*RF5arl!Bi0lg$j z@JcHY46_n`AztTw1n1F`xQ{+jOSh-ZjKlY6Jv#m^$R+85>jdLPn(~4Sf3np+apse* zBElrg-Z|I@@(piBP59WNa%I>)xaSlzDQxw#DVQ?dG?z-db<``CDJ}-YJ1d5m7AH*p zj$l&`4Mpu66XfyC9WkX#cCKTPfnxY7APR$WcA>M!FI*jU7lOOhbiyST3vbJSyt@Yx zK@#m*ux%-rC*kftj~V5HX(u>+No$trWKdb6H-Coz)JQjGRg^d9)TnPe9OVqAmKv%r 
zXrfoLF|1ZPyijo@dStVpLpK43^cNAccGliIdR3=+mK9NjKe)-r0IY#jhl_x`d?Arf5+4C5M(l;op_TMF( zfI%&2*=zG-(;#p9Eosm3KFqsS$i*bjE4RHi?1`683ZE=25Q8xXE4KC@FYA4`Hx zxUj%iIS~xH&Pph2aDbgP5mWEN0^uqvtgal$%_N%|WV5>=Kt;)p(!HXi)W}ZA@1>?) zF1jY!Zq=`+;Cl^ze$Ah!NIrfGqxLH^pVz!sdH5l^Z}H?vgnM9KjGz@OkxVdARIolv zQ)1?U!hC~*!M8m40@6X#iuaM8tGPj51n+L_SkuI3Ol1BW&ykMBd~#=NWR_;|Wtk{s z=U+v(y#27K-YZYmH|f>lul*chrEzI@Z+;BZ^OR~tm=Mv)EAbtJAXf9iR9~}ANyM#& zemD8^3f#YLIW}2s`@yvbIu+K(X(nwa3ZvZoQf8lBCiQ-Q)ocx4Dnkr-Ws%f)hpC~p zxFb4&6qW?SX2iE9eLQgy?!(>$NA%m@xsU5Sw%ficV8BITnWyDtHa{mdofXiRC)+N) zj9M-XG>hFoMa{GQV2GQ5nAaYGsP5d0;?>ieZMRd8*JCnG_DHhX9uM5Y2R?crjA-S> zK?@*7ZTSTGy9X@dM|JZ3!~H^jm?^TxB%Avkce=4G69-=}=hB-Wb!VMO6ge1xgHZc_ z?~-KZ0SXKEX}&&$bp0JwJ=*R~M_mi1`7-fLE6ri?}ls1#%2&HlbVo zn#qTngzYtpUDDt$?8VEDFjNv}F&_j&1;zE*Q6ci>0XZs;(K=ld5$t~y9MViqAwmqcua?BWkBiU zr$X$w&Q=5V7DwW*a`NY>iI!sJ%E#b;^Zsjt$mw{f(6kNSMau$wD0Pp@4xcfE10&*! zp9F)cNVMO0eH0NZWc$M7cH=`U+hTEl`qN{g`FTz2H3{9Y+1_!(W4|=LA7OEbyfg{y zcm+(mg6;rY-OP8Z!T{o+u1CN$>l>HsyU!2sbu!lwWNb)BFqbN}+lpMmt;U&5JZ_xe z)s2ha1Gsg@NuNtCmQ8>zP)9QjpU{6SSwX#4pM2!EW7H7vxRcMe!TUq^=vaU8_&Gyx znR{N!aL`H90vOTVp5Df2PbN86N=VdDW6bdmF!}vj=KwB73HgDRI1Km1%c$CbMjv>K zj|^kCV-4s!cucO-46t0PfS;3VV~ z!@U@fm5{%5yHw|;zvWFZFan!g*I43YUB2qYrBaYwobWe7+2~H^bcf7v5wlHteB$=c-4|>NhQTFBdPVooKJOt< zqir=*!>)DafWbQ2r=_>?G8H1hsxc+;LCMX=>eCR}89!B>-x+pHpU@c-@ z98H#HoA6>Ts{aiP8G_ywqfA?}?p)H%DBnZxAMwpQ$`>8N?V{H_!?FRIh8|FRu}{UF z9K)}mqRgfAkEpj0(pffmiCH=z^vly!YX6&QYm??Tf=02nE0gGPyF8nlsF(1!?|g=q zbh_BYq$FK5;HbB&ubqQAc}dhplf34eK07`XprsGAdv#-4Fb}gMOH0z>RAx^TUOfC< zJ-jjj8MeQIFkXZ{wzuq2mi`7pQEkFah9e1 zl5Xtky-hTcoV-B9q-;C{?8B5pW>L} z4lT^9=IG6}G?_lhuogH^-SomzH)|!DMCd&a$1$Ok1?Xh>*zkAAA`sVooU(~U(P zUDh^jf^cgkjnSUvUxSu~y(G}-w)?%}yes#-@}`5)#>{jgZ-Z|KEF9@3c`i@L;2dSo z?S$kJ?*R(i(0CG{nr7=-vFAqpor6>}tI3|O@YQlC`q#yK`&p-71xZ?p6>wHnN?>$! zt%Yss!7ZmzuR^qH#tc7P{6KJ|(`R)fg9iKfJcTV)OBq_nzgfbaj{_ZcsXP{WYDt>! 
z9u}|TQrG*P^7rY+v0on+dFgI!irUDJ;b%f(nz}NfeE4lrwTtc7YORoJif~8731pkS zY`7nWo+x_uHs~Fp4RmZxptdBl<9#6i$@=Kr4i)%$g!6v?J{>u{mMjg23xOl%ZIx7S zqBwW$kOVUXg|YbjyK6;YeW|T*p)Z$6;{SyhiD0@!In53<`+ERbFULS-}Dwu;K8J?@(JiI~bXaBzJ+q3ANyNL`XR znxjN5?C$m^4bDtI9oXJh%kXaeHnCaVrYK!}tY%jv)6~|Hd{P~MZ;8GGp8>(o z=RaPRNc0G9NqUstrSoCj#RTAT+l>^9#o_=TEF2J+7@D;ClG2y;KAp3uZ4eCa7OUx`9=J}~nhmTRYK9H36B8=OXjcbMDm3|o%j zh-ejB?*EKYC8g8J(nO3k=PB0^^zYA*qOr7b+;W{CCCJMG7Lj@{w8Xv*{%uT#1pQHI z(D+21L?$W3NnHgL9?vFLKTI)XWT|WciY-qip=yKSejt9?pn6;#-$?(m7!B}zP&na~z@1v%2&gMu$qYaOJ= z9Wg2y^GfwIrAE40y=J?=srGO`(pih;_vt zZw*2V!7yx<;d|+6X+d&n{_IMrE&kqP);zNINrAW8b`p3EPJ>X>O5jI_h3E&o|J1El z!o@H?OVd_22pTK|B_T2cgE#34^0wL{>oWH|S)7{?W+L=8r1u9bj=ZZD$=~zoV8{qi zv~O_GU@P|gP68I43H<XU%I^TmkVn<0NO)137<`lO`v=sC?y1=z~x@aYE z9Yaj;sRY-str?t5D|CXp0sA8qj><)TLwE_{A(~s$fVMIe)XQp@-S4usu+H5rJeEGL zJ!g=}KH?m-fjsus5rUF-%nks@jx?p*Wir|!(m@6@79?_knS5UWPx^xn{9Ah9TdK~> zayTi&*V9Q3mC>PeTiy8vprcQx3zWXRo`Q^h&pAdieMC4gC~fmUzC56(xVKwRBIS)h zHxz|yq@CSKOE7>2(pgnGu!Xx`@~LJZm=A~J=4m98?T#qDB=hrxD9HvVI(84amH~zb zR^x+v3`sDu-)xq!Gq816e8Y6H;$WsL9P=XB5;j>{R$$wdBB#My*!Ix74hmza!^4WL zBJ0zD^?C7M$+?XgAHOEuWJVz3jon>vDBIaQ64HEtTsjnKWUYJAK;W)I3p-a!&Fpi( zrH6bxxZcKoE;nljTK8{UUmCa`sZ&?y{SFXd>yM50qmpdGs^xv;SA~uD6bcx3QvZGp z&orIh(UGOh7YFI&2tE9=N*CWO@63~dSGB?fv$jISZSM>Dwb%DHt3KhWo-|A!g^I~0 zrNw>o_7G#z$0gd=3;92un~xK@hi=5GG~L*h34);qWft9L5Np51lRTKOIo?YgDnyC# zYKIc1n!jYJm3C^{1(|XFpDRrHEuVWryjR41(bb?e7nbxHK zmDVQ!t5CJG#rPAoahPv*PZ)-50lzE#{eUsbqicy(|5ysCAjZU-@XW4>SYLMp zR^i1VI0@L%_z8BTL4mO7ku<@ zOC-#^TLi8;Tu zbQ16m1ZOwHZMNWec|3UFBw1$zh*;yC40UU44B7Z!?h8Fmiacb>1Y)lj`A_>P!^iSa$9l0w5cP9|d%Ot9V?C2>E0ubDs~_kya=h*Tp?oJ20ODtNj0?sa(gvr$mubR#D6KMsa@=m^ zqf*KoETByAqHFcWMv85=yI>$T5@jmXT&E_wZDW|=3W5@yQOrm%n$r1|p_+~6EXL=w zpsDM0R$Hp zL9zA%C_Dontqa$X`ZD*iD80)OvoTJ?&DLJSLk%sw67}tJ;&OJbNn1+^CWCfgIXqcI zz6#QTpfMHWcfG7dNBru}G%ERoiZFNLM; zZmtpx6GQ6KUxrB3YY^<^SeI~lV-Lhan`5~m_X@;2Pqa_H&oVFw9E792o-jYc5HG*& z5w}9MFUtxwKMCkiULe0N7~E}wC$&R3D>_$N#ZMQ$JV3%z=x>k=lb4S(=+CwgIS$S> zYWIZ@W0@Ad)WfS)Bpeui&9CaI|$R4@SyUM}*Gg-HdlxLuD~S>SaP*6!w911A}C2`+OkTRdLKw>lT%aMfKxooo?u6@t&*7`|1C z&-BMSvI)og3&=v4X8rKJWMjhKHsM@+8d#^7G0EFsMLF7PjdL^mDu7d-oQkcx_EVO< z3GBUI?hkjr7xOCi6ZJDyIkI$wMrr-LN2cfc)oue&883y_{ZFV@6K)Gp zPRtj<+`fX*o02~-8+dcQV8_JxyRLyYZkSgLah5%g&mwQ@@V!z4f3H|-5ICmM>EY)j z<(jBd)4Gf;&@5IsCTOGyZreD;!m(}kS0FZvQwkzS5pdQdX?Ao5 zA9TL{XHn=`(Ob8DFn*-v!FS0ED@!v`@}A#)=2;k&sm8Dz;s9|VY3P2seap*)RAz7AiAgy&TpgS4*1jb)W( zad49eadu|jy^|MowrARzQ`2PH%=T0OK~hFirXc zNie?h2;av2(8x%3%6k7lYDWf0<=0arY^bFpP0Zy!=>`6_)d^vK`_$kzzFq7|TAIVd zx4IlANeNt_IT)4WbC)*krUmEh&247hteE{ObtU10kSLiw^X0vKlv~gl?MeESOpS%6f|2lYA`NUp(VmbXw8B z+X*rE$oY`xzKs_CU@Fe+Q`Bh6teb3Ml@1K)!cyHw0#H6Y3iMG>L{-Yb_?U3d$&_;@ zD}GTpc3fSRdLRP3ok>JoLg?qM$4+@_28CK_I@2trm#Fp8(QEbB|FLmGO#%&dH{ zJ%5WJQqGnG5Oj8Mr6oD+R-8mBZ_jX{XUhY%UCNrKt46}&TT*fYh4nH+B{X&tg2F?0 zOww&rDI%r}?}@&&>{8cwlzh_NIJXwLmY%8~&J8`aE-c&!oaEHVjfbm+Y`?NcttnNH z9U}B>2dElPJ!n6#*6V@ek=7K)Fzh_t}C5>{QulTZ}75&vV4pf^I#gepyAhQ>OS9k zOtmCeoU?|U@cr_+PmHb2!FzYhP|!v=D3~s+?>{Zq)q!X4ccO-RoHwyj#<6Hv%?-do zk^ygcc03Q**7an=F6L$!(W9qH`d*ShTGf7zG{9%)vM1s z{IjZ&jzzp(;9@doGBlRcOkXF1h_8H4c-!l3_;sA@)RhzenD$hhu#oQ;M_|be%2c>l ze4@`(_1d~RMweeQ%*gbVHbixLSM8{@_f3DqC(3Eq(aO**f~CFc_=iOhZcV{TAYJ*a zB1Y#NGWgB!`XsU=_FbOn3K*WU`a8%o-+=)5DU(u39>?`I&NG(Je7Dc+8nW4I8+oL) z%c&+jQtXljwY&sYf2~#A9*9vodb-9Id~cb(P+W9PZyLMN9l?=ZP3t!cMM?gNco`5RKDo391$R-#dhKr6z*x9tIMF$1z&v z13pFMJv9>WrNsp5R&dLR2~pD>xG!KmcJFDZV$jKBkXYvjJwIll7(hLDAomiuL5Y@PN{Lh{5a%lH>S_&yAJJroZ!IyUYI#h)&~%oR4C2{m`GkwzoiO*#AIvR{MISP zP*BdAydZ+3-a_uJgi+VDUgp+{8>(1d~C$ 
z=eYKaf)*gzUV-LuJ|+L4usQrwTI<=*MB_l)CiNUel)mK(%uH|K%8Wg`VrxDO&~@w{4G&0X0NCB)K8A=9=bA;Ngzu; z4oy}kkM-L`KH%Ha72v&!%AEOg?rhdQijZj?e8ssoWKqMtuQ5pO?-c(y|8=#9Y|sJl zEiTY_#uYQ4ClnrqHx~x+MBgu?d9654voOZ(rEXrU)vn@nC76NkYCG|%F_MRI?jyL| z%67ih-dELctP+e@-Bk4j1q{Mp&tb2XNgXc1;#<>iGzA%|+Og^HE4L(Zz4Cl-BRfto z&aC~^-BC_2*~MqK%xE;Pp#RVE-Tm%w*9Oj?P7%32x56xXc_3kxcE`E&E~mfx>}cij z5~|K0g4648E%LRR#X-Dn|E${AmdXvsUO`@Q*vrSBTK9?Dt}ML1!IuZ3fy{O0PcdgR zdDrQT64IuDSYfp-drW4WB9c+vE7%+^JY){5<~)}`!+q`_L@bTmjVUl4A8R+8=Ug`m z`_qw`>uXZF+zXk%oG`wnnVSynZ#cc2E$PKk^Gu75erj+8MDx|#9q)`@9_!k;_&%Ry z$|Q(fUa-ECOA?*8L<%(`B*U$luy#uJTzAkW-`(U4mKtDRgBSseiYUyX$ODW1fB|4* zI&*G&jn?sb4uoI7J+*mlyTn2OpmP(4qq@fRtn!KX=jIxMCvvDj$Y4IMI+E7%-rG zP~zKJ^~zZZY(S3`6@i7@?yJ70ZxQ_}Zots?J6J6k_#=qV`X1AbNQ^-kq7Ha+bB63m zCkZP!u2Q)Sv%!7w{f^}5Q0*f6@5$SDUPGRdSA+ar^>YFD>bCgiJ6}91L}ilGyS8L0 zzQ^P;#dC?eqN9zFk?GsCW^NlU56_DgSD|m zeQd!H>>2J4$xiL79JZfO19ZD1!X*}Ny4M^|AC~Q!0-w3d3`V8U5^M9Hs?4;Iz^7|* zd%t~N5NZDNHoJe>&>t84+tJZU+|=NM)f471Hf3hDU{{^5f066@;BSebk)e(SjLQ67 zrv}Y2gWVx^zx+W4KZuCvZ?hsS$*BERd~f5+M9eQ-zBH3UFw=!J2r(Uf#ZVry?kHCRctD1-xmadP6mUn z#8QUbqUP!IAiT6-H4g0yBP0#Z+m<4q35W#<)R{n4{)eijO-ZzF;e-%&$A9-KmW9*p zctxRXlacmy$ISOBYmjR@H*i}0NNCs3WKdQilQce^G4zn#RO9>$zrRZEH;G*6rln1E z+J(Bf1lJ$s97NgQ1@tY(Tg+PnkVZURwlF#%gszqRFu2Tb2V3vtt1o7b?q?72F zL;m+5@!{tWflGrY5rKN&zd7GQD}qtp`$PKI`Ym3&-{&$hu~ZE;Jw%-LP!Gl)D@!3^ z-_Qo1>QK#`gpzFF($DKH$)pMQIT7c5 zAI&08k1#Ru^4T^`Nlw-rJzuwQq70L%Umo_XI;yO1I>6>Vxf^$(=-4~3IN-`(76;yL zT-vUQ+6p#zeFB+I5&Qwj+{b0Mxo<_2VAwTF0Q9}Ki9i=%!+i`$;Z53h!Za*ETslf8n z+X(4!=1gdy#7sPXR0@z!2sR=OtIbrgjUVgO&U#}8-aWkln`InVpaGeD>k)hd?Knbl zV?F8B20%r#R-{nec<>revyU^$LUEt(i(SZUQWr$}vGjN#$QH9^x?EgEO*-HYAL$_3 z4~3p?#)#nH8G7fS?Frk4!Svih=IyOEcEoqy=}e>7jyErG!^520f~)U^ ziZs=?3K;cg|xQ7>pikoAQF6jw>ig{$wJy3AnQ?|Q!6$9ejF zq?eZSJHziX+B@(|nAZpAjrMbYey7lY{b7hXmsR-BPrjuz>J>c`EjnThl1^;?E?lu< zCOAHDWFT)wy#{`E>lbaNWZ4t7R#~qoZObq-EZ=%m{0jzatG%{-(Lq`o>Gv*bZDA$be&8a|BPME=2Kr=4vztbq8(I09Jw zAh<^T3c(4DMZXE8+H1aS2N*eVaH8*o6~>k=Iz457*2B*b3nLlB*S+YG_&Na(9M*#9 z@;CT~`VEzTQ@miBY;2O=BTCm@)lvQ}jK}a-MG>!pne$><_Q?c${wkbWugf+ABf6rPrCQ4H?~;ZMoN94NmZpD-z+9miDD%SAJ-_5Cj@mzu>|Ks~&~gu}48 zQ@Xa8Krk4kgKSS=i|3JZD{s~&mfj)o%i|R?L#~nWiu8`*r~?CYSYb>U>%}kr`X}uU zgrNdPBIAT63A2saqz)3=gZ+s%uuLsNz7SkRUam8T@byC@w{3QP;O&89UfrTQmVMV4 zQ2Al7~ zY7a_k>Z7e!Y2g7e+Debwf1KVhD4ZM~W}4s*T63<=kPNDYhi}0#IGt%4^fXlL@(GWv zoAG#JQDyChXtkh_#pK-)A_KE)b1IKfA@FYO7JIdS?BN4_C%Z00`R0f*Nhx7E*tB_^ zx%-3$W{@o7w32jb=FnP`b(|Irya{i`bDPVH90+S2XC6wM!+T(E+$uRaRQkB^*Sbzp zvh)eS@EFb-kKTeFaE|3BM)L)~QcDpSn1=)9ar$bsn@b^gQ0XvJ&S zCg1(-bxNNZ4oFt#j9A}eEtg743&U-Ipl%?)l<52-iu^?w<(+iaVjuYlqy;T?grN}5;e!zL$aA6sD% zDNd$o&{U4q*ySCGFdBj%*V+>yCf69iq}ve_xo|r@^0@(@BvA$5%T>VP`hk=7JQi+I z?rC<%EK1wGj1%!YIs`(^=WTooHGR-g_xg23#&X+)1ZaIFY_W9vfhrIcng| z(q?_`E$I^wc^XN5t(vg_yMx%GT1N~qk7HoOxJSe3N}#CrWNxgTr)RmK#+m+o- zaP5Kjk9!_VcA+D(dlKB=HvpmEL95$RhNjR57dqo(HIeR6v?UP@P89c$r>p1WxvM69 zBJ>2kSLg)yRydGjzVZ03N~?S>K7dIW;B)UNPgL1BYR|KWnR;JO-Dsy+6!0yI|5}CV>r-uNp6oo zoQU}}!$AW=Nwmwo{Mb^aaoTIofWC^@c*V0IS0Ml#6bdz*E>G|*i!0YeV7r$+Kl}0duq|GN~~{(kwJxcSBV@sknP9-sXz*b_-^1sCxSvfb%=&e^NkZ$zf816QO{x3{9_1pgs z@OhQEKbI_6*QGIoRQHEi=H6S3RA- z=YG+qT|Xb7?SZrL;etgftZxu6rsRi2iRC9pkfG1Zi|_Jtd#9AV($^q;`)uXgGyw1O zbJ18`oY-XpNoT-EpHjUWUD zuNI|&4RXopALWc!MOOa>SgjsEJh%r`9O7F=UK{SCdXsr+>szp(!XvDHQn?b)cOpS4 zL^)>Cm0{K7sak3wUiz!!DLiPIjq}3ZywZkc)cy z5Y$Cix}E%T4v%%{*1MG<_G5Q3X_F`IynpS%OTjGnOMK_(+j`6?RX(F1C37<0!!&Ym z+5P&b8`2oH4#5n(cR@}WdJ^)csU7pVN3o$ujDbzq&SXe!yIl-`F!^?kbpO#BVve@}VXxumJB(+Z=Fc7NpZ&e!p2u0mqN1=#RJohe`aC8YX$)}N9=3Fh@ud8+@9jSS^xtOo-SqeWumGLWPBRlqHbP0RS73rc z6yZbQ_5CZECiK?NyStpHW}?VqA0_NDM(ri?geVL9_=C*Qy2Q-hZ}Ndz 
zeDPZrO#0pJp(Fi%`zP+PB94i4xC_I*gh_NadF_|N5z3GQz8s6SF39vik5cZ>v-fV+ zPSnW_V(_oEcsv{}#TPa>GRD55cOnF8zm5;;8k7cBlLAN|;zDTZ_uk z3bbrq+lQccgPy>d4L_Eruq=Wj^oc&{vB9yY5`%Sor2ihC_lbe^w$e<4>Z?&(Gg(9% zH=FQCriGYv#Eq$c;nCl7^tT0nx#+r1S64SKGq|f^1rECTrxorxU1HpTWvC|ap>F2x{<*6Ef&rxJLLxN({8(yl9%Uv8ivK^=DGPNN5bMf+qr%p89rKgft)$k z+Lgf;v^$+MbbjJ6ZH2KjuC`@FceK_k+UmE4m=?p5GjJlz>^HWgr$u3Po-AZrp&mJ5 z{*S#>C;Z*FcwBKG*xFO9(^Zpo=HW-aa;s+hAeW5)`@v1|`VaO_cU*}9n7Is=F|ZH6 zji#+#v@R^?{cq=?Wjfk;P6J6(QQ~G?l>h{K{8=>r(A;2@dqn*}cDP0L%SL{ODiv~l zA1$UuX0q1Wi+*$F`Z8YgF=aS0foGnb{U9s8A%Hwh0mUK)D%E^+4=Tk58?51E;wtFg z{f;499OU_}2aZOHt}~%yjVDh2x~`+#FLnaLoT=}{@8dA79mIRi4SiqQ?aDsGQzg*I ztZ|Q5>Azyfe++@w!*yMR=QnLL{hia_joi$r{r>_lwII&8YlDMJ zJJ-J)(PeR*&Y(;*Zu-;5yGBEvZX1AUoSa!r+mq=NRkCiLRbGr3EtvWS-`#5AUPXO}X~OYHSGzfRunQERK!Kh%m_fY|5t8{>m*4%N9N z*_$*6(S67j=zv~5f}FK_Qx?-d%OdXkLnQyrHG{j#D{P^Xw51Bn0>mceol96DXmBsM zo;nA_1WgjX$cYOw3rmYy7WH57ojhe8ygAI7R-Ty<#tGfLGz6PCEP#+4$LG=&KKLc( z^M1Bz=7hbJK~%?ZMn6njDm^Is{4mSem~6Ix2Z|KA-3_MEHASIZ*Isq25~WulVR2W; zj3{hhQV7jvIavtHw_cjiqF7BCLNhCqZ&sk46bt|)qHz5XnFHw#B}_;%)SKv<3~=t^ zOq^gM#r0&co&Az$se(l-m1&{9tvcbD0Q|SD_NOb)xlYw=4bfoKVEr@9w9j}<{ioDH zt|NbzLL$U~<3*jR>IPV81q$fU2%v*`4-A+D2!5+csa^}hjCXzLu#Ldovnk)mH(wpQclc>?O)0=56@wV)9B{+#q5J{T+#r?9BuiT!h=3J2nuur?A8B;xa@l`pCa~D7aV3)C`xiubv9WIx5&F=p=<-cs3_8xhHQgug!ge}EKD=gzV?<$4 zpx-*oP@y46cE3D!`dXHbtQY2p>8w7#DB!Jc+V{1K@z$%&!olOliI(UOl!RcfU=8c& zxLx;<;Vts-A@)~1e!jJWMAUSn`S3pwNQ%SSU#Yed;M3Z#dJ|$QkI$svWAW<^2e1ea ziq*li(!5Hb1;Vz%)?aXC%aIAnAgo6b1vmqn}a^INb%Hs@FUJ7CbdPnN6-$9 zSP#))&?@@^+wC9foI15Py1TKNs^tXqR(KIzQohEC}U%XW9URa1|@ z(PLa(q*6Wgo~ZbfvEI7?&w&6^h&9&T;*IaOA*lLt6{Sm8A&=>;emmDtrI!^h25wm@ zHF&$29V(*lrF(!K`s)zo;s98O2W|eQY(`N6ahjlV3s}k1{MI4b$QGkL1D; zES65!)dNd4j5TwRcFm-@!8<_$ip+--mfLtHqAZU?dtQAZ1cBbA6qM5h&mYyyX^(~2 zhx8!^_vd3;;jXk%QxP8NHo3C-)HiEv0gCPNo!abJC%2pJrfKiFDcw=HpV^bWJxY5$>aVZoK9;m>DF_3qoc zHVjDWTPIH((zmkAXE;oIptA+5aJrp()rohgZFAZk_3$%&KIUi8^i;hX2WEVX^W3Ub zuRT9GmwvS$ADW54HWGMa!}si|Z%^m9yqo`?=Nh6v>6>rZUpERB)DJ>-zp$lE!PBTI|15|I)y0BiEyd>W&#L>fRt(>o>y^%&pOE&`z$My1O3kGy-ORe! 
zT>HI>!IFdLvSfxGv~&sY$DfrBd!Gh%)1bGsU-7utz6pB~Gn@B{6fAkmgub zu)$QNjxrqHu8q&6Ac1J#b3N(ciAj5Kf4k&y=3{A(YiM{XxDhV%i5y<8=(2FmvY5GYC2TAf zZs$4yW7~7_LV;P)&$v>c>cot)+kFp){&jTEK$Bu79Tb~{!t3l2G&ub^dTZg&gkF-8 zguMYL2&<8$p8W7lyKII-kU={{uht(hH{xXJ{ozpXj7iD(fm}dRK;Mki1=c|kqifZQ zJDtVyJxXX1y>|&Ody`o$iWY0c_a(V+y}L0BkJYf{yc52cLOCXT>z_V9>VYoaJZtaw zGNy#fAWeO&J>hzjtxXLxB#S_q*jN#CbGvW)r>L2=m+m?6trM{)ASEB!a4XJIF5e9`sPq5);`LrgOG;8nq7`OH2`Rhq`yWI@`5;;WaBmIq+^zO6} zFts=xS7%>{8n*uCG7n!9{7SgpXE!8!yvj>SbK3ih9~J#8kK#?j!XcTrw_I_88`94u z>bZdJMBv}ANg+{dk2h_EVL1ToyQX8WJxzjRg$p+7y=U9`Ohv;?w`;xaIgdIQac@>juUk6wg2s-wY1a zhMXsZrQ;tJU=8VzpC(WGl@*dMlo=yR(`^*QG-*9faF0t2OFo zxVR)|=ce^>5rS5A-Oe2ddJ=SY=^`Ff2L_qcx-rQAhagT;LMbFqX+=(*a0{`Qz}R4I z-oa=O-n&(@+4M#_+jbHR3bfAC9h9sZdyCr_3fzKEg3*6(KKbgiTU#qQr|4Y_Aeh<~ z=eLsgr4O&QE(k2A!QX4-AMnGVVMq#wR+{t>9=EwQ~gw2jn@sl~&vmmCd zWh4mmQXrrGzOHgxBV7$Jph$femRLPFJiQ&@WDx0=8cVWyUb3hZU>P(pG^SeLxDB1R zi~$#bS?-4ni2(Xx7WhCu>X3UAE*C=<*bpuCp&){`07+w!(RlR=HG3(4#2^9$9l1($ zSw;;$u}qfijA{eByqcB66)$5aywE5QI&rQC+T)5U#oU#;Qf0~7Je~>d`$UnyYX9!S z9fNVAMN}`C61D)($47@SJ6B(2JRa;vb#O_^5^IQZ4DP%Tm|3VO{C|Lj@jwiOP@z+j znr1C8C{3mYk}dya&zVwrmoIMW#6lMc698_+We22VXO+(Oj`0$}Z6*rzh;iWX+ePCLFsiV^*yctHCR`Mw=~x$m)+TUSn0 zP8>h69;Oq0#7DY8Y3*x;XVnja7$4Dfq{+VDCI z>_ty(a4q2%3Z%+l_flPC24}19(&btZOS+KH=!I0FvX+<~i9hVGLfCdM0 z?#h<1a&tLfbtN|PCB;GGDC8n`E!}Scl3diXr4FlOQ&$%Y>1d)Ivp5w?P ziH6!kBU+q3839A&&%Q>L=X$UiIZ4Ey&!VnXk=}%r_3#!^eJ3v624X;M)u*1aHBn2R zTguZ^!PpPw0?repJo7S$E4IQ%OPbR47dKer}lFqpY z?6>=N`ld0|TGfZH?5O2Q-EIImSLDOK>r@T5{GrAHP z)YVu;+u zZiXf)R*QZwS*}mr9od^F8R%?gEE_OS-l`TkdWuQ3$})XX`f~!Fn*`*RzNhtydvYHi zn_gFXO7V8QC$)ye0+s9zc9kb8i2vs9f4WXQA~@uYx6UFJM`;k+S#=9e(JDm_AQdY}{w`wZx0lPvM@rm0QrAhT! z3;_(6r6Usi;^cAcoV^N{rTUxp6^#GAwXdk8HET!&90oP%2Bw?Z!^t)=I=ZV}V`$B<{F!;1p`NnkS<`)L~hY!=L+FDf|gNbelu1n>vh zkxHme^H&B-tRQ^}OE6bmXaYmvWZFQEj+q}Gj`5z%dJG*Ex48l9wLM|9-s9cm+2)dN zG6)CNj=1j&AxEboE#&Oa=p+X4_;o%LlryCm3u~q(mpWbH>FD>b>HRTk3f)&`Y{TMc$FU46uHXq5DhP2 z2u}8%#mprEh4|q;r{_z;Ab0yM-L5D=>-!?%uA<0~sE4UO)YInoEd2`2CwIQeiTwut zPf3@0${#VW40R=itC&s9o&$gNsg-WwmbaD94GXrk)C>aIlh{dD_7}>Tc{zJ_#EHTk z^zBf{1;I#4(Y3a2Ck6-)oF;E2bmR~UhkY;cu`@SCb>p8TfcuBYh3{|{(ssH^%Re`W zYFrduuo05w9u!W$*ivC>g!P))e|dau4uIfqnbt^P0C|Jq%{tbiea^^k4UBktVb|CUFZMKem~UGm8zxgY+E zhk}x+UQ3E|ydMLf$5u63C2aNpAdwT*5f4mpL(O6%q&0yl6c^4-rgUO3OWy~5Per!p zjy#5pU5u0HysvBD%C*kD+{O+9d-=i-#qXqueuH*O(a@sWBXHxMI(6Z&YWj4V`C$34 z_Zk+uwW2*Ip?giUXfF8G0c<#}D)5>+eMj za``}sxg$7xUm@YEmCi!_t!qRQNSlG1>F!OKLb>cY`gd+UGU$&ix%g=~+V?zw&j#q; zuNN2Ip=iSpVhK%F=0!eQe=DC3{Nw3^?Lp|eSN;!*`AbpEpHH0tzxsm>emGm&XFp8e zOZATE+rk0Ecw_QM+6N$$#D(C2%o5ZxwWoXp0-JI}e{D62F%>M2$TVDplZuu35TlzH zvon$spQr>qN6AP?xoKH`3B}-LoBV?zAbiYxo;O)15cQ*6pKbu7NAgQjMARROK5wKw zgv%8p2xkarF8U1GDh`x?emOj&*>YT`tdH61SKI zd;6J@CK}j=MIPov0*S7A$zIIIGZYo|XpY2l$VuT==YWw4mZ@u0+<_x!>_ddZu(iVxwfJfG(dkjVJB7xi~Q)mY`It-c=GN}T2>^aViC;0Jo!t) za}aMc1{-->bY>!rhxh>GSdgCrQP7KWmX&G8;~BXOcSsehR)!ej=ty`vLLi?n1H@`% zV;pd_A}9P#dMC)Df;3$%evMmrrEC88ukNj1v2qLh#8{{ zC5ux}F)fc~Oo(9_@c@9u$}W>jh}*16MN6p6|M~0v8R0MFuAM zo<^S>;-fA=m6jl2Dv8odo}VASzO$C6!!HH>34N0CMvTmx$)(4HaJWou2npF6|5OK% zf#dBE9R{SFlCM>?)nLN=Zg4%>R;me*zr%DpUB#_PJvtMU6iHGW7j;(LicYufJNoz8 zPsFc4i1PmO>y=0hT{My5SuJNGR5KxO$uOEOP}`Uv&eIu|Rn0mP#}%)@!kv2KNLADa z_1Uo#Q*_M&x1CNu`8lZa!xBxyN(MlMBMB2)IlQ@Uk@xGRe?NlWrh$X7bR%b@UN>;3 z$b>NHRw&cTD|Kq(B(2BLi7OIQOq;x#FXY}Z9pihxT-;udak^HMOLn*9A-{(B*(bI; zf+JL5bpkHZJ9Kb&8oqSFb5inWTZ{~dW{@xZA`O8J_m7Oqb`0VH5qRb>Z3yC`TYR>H zu_S&Fh`BU!*)r#sMm-sTOIYtVf{SR;k&gylII$$%RCMtRF>t&KJABCvAHUb1F zn2yh<^~II1Ld;@q80){jb{RuG){=(D5`k$(L@L#`s zZ80N$$=DdXFmbY6UgvA#k14udgS5_U*3l<-DxLZZu>WbKbEO1b0!UcG8#1vlh{lH8 
znIvyvo@XBKk+jDRKQ~HQud10Wu(%#rwww)i)tUm$L=XO@0TUUy%fHUUFj#i*+T5Yt zO2I64WXBrn3rH5c`r>DqUdgeFkwuQy7tMpZuK;Z(YB(Aio6Qe;z zz5R|3)9yJ#0d!!P`fK)^%{>Os2=Cra0)6PA4+6{~(M9*}(gtR1uem#1(?_4TV$@a@ z;k0KatErpxjVrY;Xy;8NHf726n&*CF|Ha>yf6~CeUX^mga_Z~se?C@C49^LLA27Pk zQYEsS8DFicQuLGF`OD`0DV+vXHS?~B#SgMBWvgONPW=zs4iOrq?h#c>P->5Uo36Px zs8M8YI!3^_;07NX2uHC8bGb6 zs;Sw*3HmxeT#lN-;CDvna=_Ql?CNw3Yrfx04$hY`G){;FN|H36r^oLm@n(zSw1UIq zNVuQdA%@;IDLIAFk`p@Ul|D_BXlV(U9a9VGvV@S74B}5iWa=MNJ&aHrkud6?luWs- zTjO*y^48O+RIr1Fnd+=;8H>;Q{e)8OlnBE&04;p2=F_EatGXVYbP!YLfK>kg#C9Ir zcVr*AHY`rHF~5P>Q`~&{HQiV?imZIweHTJy%iDJYX?|Og3A7?8Y{gz}G)pk}+ht-_ ztq!RE16e`tu&z%XYk!%#WNzzg)5_6?{1sCJNnHT9Mm}MNOXh~4fY1DM>46A4<*P?? z>t3> zhYUZ7WEMvWOs7>#$=l8lm~PIHvQi^Hmq(gE7_1XIWVk>xf?uFj3&5_|Ts)bQ)RjOhKf|bEQ#P_tZp)H~o zE0uFmhc z=YM|NA*#MPm<@T9Tn7n5;y;#oa~bZ2@=a}4e=r^C=<#hC_9d<$cE=sQJd|4!=JF#4 zj9B(qrGJ|{IC}=06+{-NIFQPbTf1}6aX=}fVPGvaN{PmOFZc4LL5)M;b1 z1EQ2G<`W)rIzc*H7@EKagf3suQiDs?sA90Pi?TN^1cpgI|hyd+F%R9huOVr4-& zPF7KFzTaPDemD^d!!Rc~DG>X2{S9ox!uSoYm~PBxDOoM6I(zvUY_QJy{~*x*u^&JS zs9NHZ=k-HZk^Z?nH57nJwGgu7u);=XIR$T_8rW+&r@GVHFwac`l6*WJ5bEF!*%&^? zj=`>S*67uHWD*z6S3=<~Ti$JHXz@{Tva5tUcGGI#@Wkxg?sxb}dTS zy$AbIug-o|0u#8*J12yA1Q&sf^4uyyWV?Y%RSA)=1-sl*6tQvuZY&NgP?GSiWV6iM z^aR~)d2DiqI!XQ^1=+Gzx3=|7 z=^(7$N1Il)EH!X}r}PzAh|X>oYa+ zU*~~%vZ(V&@X8pbPuj!LByeLlOE@mPTpHLR(POJ50yBt5TY84+20Ubg+jMrSMub~a zb(>V3SfU5-M?YekVK)Nd@@1^H=(wA~t$)Hwz}Kl!=2acYE6=_dcvsBn4)1IQ3C{q$ zv^mAU%`1okaSkkJqWaPX5pqY+UOV!w>nn}IXEl)w`J|F~=t>%9ztp=v)9G|mmzQWc z1ZX=9-g~{g)>vc0dYgg1bVZ8;Km?GZ>ulGkEy#+fuhZjS$irA)1ymViVyKhhfpbV% zucG8h=bo9pdjI8eL_mzLTXQS&yYBn1ZI#92ss*YsIz${g5Bk-T@4ZGR_2*=X(rxy?pQYP5zyP#g3m z6uwO+tM2VgAN;<)leUzt?BL%tvhq7B#iR=v@6EWs0<#P#iR%REg80i4f{$URuV5s*CtpWe z$=-+dPcLQjIV3@)@a%9x!{Nn6kBw!r!m+9X(tRi0mwPK7xqs^HH5*y2ooytx{n`)N z*U=AFLjnC?E8Zuc{wJmB1>d@pj!{g?cTII9Ro?L}8jD$e>^L`0*X8+ksmzMyp~IG0 zi}%Z<{|9MoeW>wWL2-_({57sRI30TNyDq_MuDT4iY5zc+F3&p*!~WWT0KG3*M#Kh> za^cao{a!zhP5TvEVaDL4#U9Qjm1$f zC1!E`2zA5g{9k8!vT#I*$oC?{DKT`_0R9WknrX`7dbod%Rf{{~ZRn9m7`X{aJrg9k z^De(jdoO|m&@SSydj}9v+40-|Hh|iLgGzuIvDZxB_t)YyVM6%kX2i#CD0|ZF%MW#J z*iN4eyU3SDZHuJxVoGn~=YLu+&d_(fdxZ2ubcz`ejKage(7^P}UQw!2FPob{{&*(B z?;l7-?r8WI;IiyTod0-a*w-b5=Gpz(9ON2}z;AV~bEj9ffifON^dui;yqi_ z^B!>8U4Dh`Lt9N5DshG$sX7pXDSXoABOk=)!rAlbFfbg@NXz|o2I7!4x;cd@1chq; z9TUJlX=zyV_ruPv-*Gvnz}h_94?dsnXC#DNJk{!xGNJ6B`UuM$&{8PWd7b3%_4VRv)4bSnQ44eX$`;Au+t@i^30=S9 z?8vlh(YVfLcs_a5PGAF`LP9FEd%{ylQcc@`$Y?f#LNSlwikk*kdh(1cgJH)7G2gi4Md^u2oHfW~z!2dGQ}xU*7{^v(}p#4f0MSfW=Z&8dP*Tm?vH`;_&+HhR6Kt)BAuQ5WG&X(j8z5;Z}UNgAJXqq-qHVl-Z<|!b=$E_qV3P>cz`qcZ&kO3(`HMuXeU5=;) zf=JEPjT31(CjC+n(Ng|xt=gI8yz?u|1t<$}C5FV0wmj^8|)h)%fKC%gFfc zXON|iJ z3fbN3hE=a;M6Z}Mp)Wt9EA1g52Ae>hsLzOs#lIIo#W~q*xy;P_VhiCBNhzS#5A=v) zBtw0E?(+r6V0xt6Bw30q6{AgRLq?KI<020;)|8+r*Z#tQKGdBv#IqeKwH(_&9b}gN zJ=NTg4)L|V2UH^H%YAuFpL3ZzuZUrLwI!t}DYyN)mx59c6_`F;HX>qm7k~{Nmtl*|*;k%^9hx}*m+8YBK?351 zosR23Az-36@*Mv^NK)@@$+HqEK7lKK+N0M3OzxCtPrnEbL1isho)vyDn8r)R^?>A7 zqhpezoA;XGw;IM~4OIRl9_;X(%+d;*MOBu|4DO#fYopaoH7bINMzeJCs{B%VYyviX zb8J<3T-2U|XhSW_WtT&%nxSoiv8+2F#4goci;Aj+iG)N?XZf*Q8u=t`tS_C?i*L)_t|^O)SoRsd>VthKsN_52&h+L^ikgruc@oU`Nt9|j>UK-%quR&tyvJk{mN^B_}zU{<2 zApA%VJd_ok=zrMi*M5bXBlPaow`!pKIe{?6Y0vdxz0TS)g-C^pX;Wz98)aTXZROyh z3AKCZS3Ho2k-0oWC{~&=TkG>U=pW>5L47Z@`Uss}+2(NYNh9;jr+xp%FZF&eOadOp zy%Ta(xkG^D!D%V*nyq&SPxFhj>}{~f4Wztx?z$)C!yfU11mxC_={78-b?=P zcDhahnqtY=Dtme1z&isNGQ!=>bqH$cQ03MrU)1Pi)ANh0%EW|ofX&cd7SL(Z|0^u2 zY0r&peyD~s#5`nuDL^TuDWdPPbAN%gv49c=A;qT$St_@F1OO4tS6jXbcLzFXznTP z+hMHVrmuZH{~}p(Y)zqQqkZTxohK(tS=l2(nNQo9-Rl)KbPqUAkf@^CBBq=U_tPy~ydHWqm#-HjI)c+@U)UoY~2=8Cq%tv|;Ob8f<=@bVbt8@&J 
zSEtPAKD+Jz+uc1DJA3YOJaiuS$TIj85&xfQTL;+QWwxq=^7!Bt17l;f?Ju3u7#aB_ z0Zzew)?$07y-As-<#UaEkpvT^;b)j@|7N3*>7E6G zM7P>2qvC@|2x1wML-Mx>0umv=%89BnBe}M3Sw7FeIz>6TO5a;SB`Is(gA$WFTGt1x zVKW&2$f#`d3+0}OyTK`=7nGnk1Q9#O?Sb2~qHVed(1+<@=yi4alQSppW?ez2YoO`0u= zsBz*6aJ5K0`;i?#80pkDoEQcj|IjMqcH-&UmGf<+uOP^9LNffU<;0f+9itoxwFEK9 zB39kE)0OnCY;0A*0n_F|R^Y0Nzdn2WK{@_Z+0WbR^)RzvEvV0Kej^@f`24=sR-}4prSVf8Usb+l07j5?WIp=T0B;K$fAJ4RggGiT1+jY z%C42sR%D;>N$Po!KRSlSNy}FfYN-)bfFxZv+6dDZVU)Yz{QL%G8{(W6AR^}|-^~3^ zb7ehSXg4GNY&dr4=@zx267B}!L>3(K!MLnn=n#!u=Ug}%zYUs31rA}rfDHO zf1{zmvJ*}L_faXxg~4L&&`jB1B%-& zA5Us@>9+|F+)sK41Qb^iql{D)0hYPe)||miQFEG*f}(0(TVzeSlFGF)jR3|B#){LI z=nryysqe}n9lBv)QiKa{kVO7~^=SK5N*OxB5i0}>}b?%3(GT^ITpg_$)yw&ss#&fUz^T}-N4Gw4Sc<@rE!Ljp#Wjym4T|-(D zwvlx1#xcZwf^L^;4Fv`T2lpeUY#Z}&!LwFdtPY>iwDkw-3pjn3%+sh1G969bqSvuE z!j^4l2^Plw8&}Rw{61artjP{R>z~DZstN6a3=Q?e62!X3G& zZVAH!=%|gsh|$fQebK21^-UI8^7N9lanlBDXVjbWb4G`pa66l3)xe(S*qOqF4aZs#C6pIaAjdre5(c+fZk7nsd_!Jx-y#dD3igwjI^6f)H#LFM5KER8nh_)J@ zQDn@0$!z$2h61n)dHV!1c}dCQfdAlx&71}9qf%cv6uiXhUZSmF{CRC@f;qVSFLva_ z{ap`%%owZcR+_oJzT3)OqK^t-B2@oZz}qa3XWm$=BzwLLw(?E^TL+mg$E(ZYLEmYP z-V)tgZN~WXAQJvFe?_H9P4>+-${Q~YJ8=gf%t_w=Nka7NRS}zVbZ3YfDiJuyjhf)i zgmpEY_rBj`CO{wlD%lsFGCf;y^g<`B*Vw29Trcu^K~gQ(=RTM+mHsufi7CDfboe)2 zuJ~1UWi~XS9=j)0suJ=)#&RWh3r$*rv&*oruLTYN(*h;s$bv>6PXI>EE3|zc51k(0 zoQ_I1$^u^e{gHQcXvIyIK}Gqe6ztOW%Lt+yBc6PO+0)&*S|q_z#*q^$ev+<1d25!? z5X?M%vhI4+mV0F)jp-v>U%rUBt|c?>8}xBMbiyc~uDGg9L}7;xYlUau4Hm;PZ;EV02a zG;r#_4nq6TyVT4SKRh;!4Gt_jV}Gf+%ViN!`Hc(rJ~!5^n(jASZWi!wNH|tb zg14>&eScU{iC}79L&n(pZXK2&{a%NRhIH#r+34^PZwjksa64%nt5vdG_d~i_)>kr! zJUrIT_t!jjM#TdVSPxm^=`cGd^;@?SmaCOD4C2+vT*=_P(PqM>&&1(YPZz(D_NG)) zn-UDa@93dKX``-P+$chrcS&}G{pIt4^qu*yFWqWVzuiAz8T#>vk)Yils6n1Zin=@Q z8hXtil4PBI)Wo3y<+Hh{GEV_E%1yQyGBUQjqGw?4ufD;BN|9&%ZHfH&VCN1Cbw*h* z8b}KA_DJn0$;)k`6D!Xh&+NE(jHWLx8;}9*@@z*De+;P+ehR3>Q5$pUd~IL7dA=@R z3uWm=2|6??=~kf|3L7}9n>cx& z{bGGmPS7%0CQt^~5OcBrMPmSU8x{__4u7~_a;Ynb0B2qtc(g5*|4jl|CnbpdZWE5I zmCi)?zITyrbX_0?J2n#-yU|M_J^w4)%&;W}1f}@-Z(Mt8nH<|^|J^Hi9IG=Cg3@{b zLe^JQAVU+r+7{x92&{y(ilB&S4f~Cuf7ompISM!x>qLy`kBRxxyIeohm}ULY^grUA zi^gGch7)wDl-~W2Q}RG6ep+a`vIVi&EFc8Kt$@GM-?-a^o?v8;z4Tu!(-mYM1TAJw zo%lt%Wz*!$770P56~vPvEt2jm=W;SP`XO=0gF+J_?gau z+nkFGnd;@K(*`#N z$D-aQ8=H`2E*0pogF{v;OyhjpRM3)N)qplL~5 zX?U`$M7ayxSVGV_3bwvtHbyYn@Kl=`9o1oFI9mD+wAt>bNj*|fB*M~ z5L;fS>e6eX{|Cu0krAY^G=4YLVZBQE`b=hS$~Rxm9XdwP>kh2r52igHPWXqQAOmf9 zw5LBZgb*2;zuA-*iAgg72oy>EfGK1&Mx{);74Bf$#s|@@-W#dWUt+sH=^m_jse)4k zqAfzEdy6ERj4{1=z>JEz475hI`ro%C4cy%JK`cu^BB}xW?}sa+9o9zq>iIsuqxCl| zCn-kHAvZb7wGXrnZ){NI*UBQB-3<@Zgv4ERb-YN%|7%71`$mp1mn;z!ggNyB9qCYh zz*IFb^NWvjq=RKY5?V;FiHS5$_~f_^eS|*2>T`x}>a~h_eS*y@>k6ab#^s4#U_35{ zy_|3$nNd6|z6?aCCU>vEk3DCzcyJ})p->^(G%)$w4+1Ds1$ zMjo~GdSK$YH*|tF;7rhRGw_)!D6Bu=Lcrd7h(D{b6Gq#Vn<-(_aM%#glEnS3a4Q5( zj&xe`neRa_aiYi@9K&3{2g@h7v1ohePkW$@8$274Z~L?)%nA8 z{7S*8z!E$&dJXfnigRdqN50SNy6jN+XsPeZYFY{dG4M!~B}E*qkY@Z3IttzJUF@8(Iz>62=CSAkR()bt91qq})DT-R6YyoS%0KKp0> zBF@pZ;mR`QOrJcelKRG+V{}~vN*360!I}(~C9*M#{cOJK+2na^&lKQWtdiv)84i~z zkhQGyx321YVr3phV8KA(uiNEqI2})^tN5nAlLI$*sW`%8fHlnX&8Mi9N*v=tW#P6B zHx(Q^hAU>LR(`@v#Bw`D@fqoImn3o}m=mu=6U52%sS*`g-IPwsDEbD7gYnB>H1|Gu zJ-os369oFzpr)Pe`4Mc-9)oGvVYWJuHY2r=*&caR+4wxM%j|~nLhz4M;B_fYmh3#K z`Q#I()TmoPwFSCX#KaCmiV4Ti%{1%ZeWOi(^3Rb3R!Y~WxH49WltlP4S@QfR$|e?D z(p`=jz8xJ?MdZ^4=(q$?XAmu4#G2#e?$hjx6^H|MicjV3ze^1(Vt@0hWoS2jaJ~;k3 zSkOn>?0Jl*I64tmy&eRDO`~qaDP3r%V>olF(65!89i;t^2d1X4q&8wlDGo@^+817D zDgPy9^jqJ3h~#~HBD+tePv|~c6`K)0$y|)vL6?7@tc>HBtLywzQeb$ao5tZhqf6qc z^sMW%6X^DVTbY>S?9CA-8nh)3k0r`?V&X5pviO{I?6!XmxT<6X4&6aBc%_dgNJSLV 
z;j4t7z)yEWY4bh0BHO`o=-3ZY4sd13zPxEPQWK_g%OWqo?Zg$Zqp$SQH;$1Z_Srk7 zv_kC`NwcL&P?jpb_==n>{D;^5cw@sh02(Gb);zztIICoS%81ztD*_^ffP5 zR?u_kuXdeL*&YrVQ?W1Ux@K*>oEYT0-}_pcaXidhZFB5%%5HWfaDscAeb#VwTf5UF zZbnU@gV=8e%da3R*v0t5q0d%f&uRvBC${j+Q1MXUqx3jG^p$u|lz?e_F-kNfSzI*NwOOk;b>zxv;!yI)x(ajpfc-PRNhbHMaj=i}*-^X*k=ycv;Bn zj0emNS)2%~gI_w|tFy#B;EzTh2KHPu)z}+0^liOtgXy zMyAN`ViY?BK);D?GVwnG^C0szK9P&GNc6nOq`6M~0LoFnS*>W8+U36Z06roA6X5Y0 zYNiVa(fAE_7!QwU0<#SYVwfp?Q)&U&$^|fIE{%+5#Cz&&FcJQ1#IDUyD0gYg-PHt} z;vIB8Q~-3$(ozzq{VECPH+8q3EPj^6nqLDRl}pcg0_(AaP`$e3uiqAdke`1^E?!uE zVEZDug^mYfjf{yjyfm9ivd()O$Wm5$fn6agLA5x(CEST$ZtnYj6py8wqYJs>a>`97 z0a2aBq@v@1SA!(s;G>t^p0eaDwV>)g{lo#hUmoUI==r}X1FSTM?l=s=9r42WmRkGb zMDFHX(*zPx;qhUdE@oqG9Y}ktCi}$tFDXM1;|sKuF)nqpggsU|QL7V%I8G;B-sJIz zXjgFcggpQ(#vj<(FI>qtmO8t6yag9E-VX#A{rcWGOr3TE@>s{yJIQz=2ISlIuH#bj ze|x9@XCx5^mW~W}L?NRRrE7Jj{4l6c!JG`*h^~=PyT)a@`5Q6R>!C-BIhst-dG$QK zNkYW@_kM_*Hw!$k2jp!oG^uC-pbK%jRt_#Lvzp@BBbNzV2ZBa~sB%U=|G965zxu_fpX`XxvV431?)?QqDZvQ-<_gV%Ir{$HkI-@dFHSNe9Jr!c z_d)H`eh+-b;2*jY)nd6xNGfWj@Aseb{k@Mf%DOqbvu`8V_*#w zF^5jTG(GRHEOeQ|te({Sww>kIpyI>doCs35TksiFZD2Q8@J#KAk>GB2c1V|vHZlZA zgOu~GoT5Opt5q2+=}z((D=7oIOMtzV?00l@ zZ}#F4dEha*>=15w>?o}4*{`&x_9~_eEc;=4)&IoaED)kJj(Tf6314G9jQbo4?`6MG zd+%SqU5?LUEc*ln;0!+7MsWcAwnH0<>U2|?_pGimpd6?~`mdQnGnql;8^=3>!7G38 z_^bTWdcb529qe@a$4HDyhGG_6$@A)ab7D~$P}p<)-L1p#HMG?d>(@!6zG^LLXD>*<`6VzZ@geZ2f4RlJ1#J$Yh%y=c;anfZ!r1$5z({?AD4x=&^1Gs)xYN4Ant&& zDL)9Up6pI^43{m33>~zaIlk$iTrIZ88#Xmcq2G9P7eSd_2SRNab$Io{JM}K!#K8M( z;Q(DxQ9R(=BmSJ%5&!zMePfTIAOI)5frVN2S>to-*H4y0lKRs!DHN{O9S9

    {;A?+-SLjN#dF9 z$deNTR7TO)5 zK>$@)c3q*OB0OQXZ8{5OqT}q6&G3WB2j2EdIz@mElXwj`nvdF|A2p@PF2y`Ht?M5#xeju$OkNe#Gvlichc8H6?g4DlyS zfo_D+H;8I7_PL!geKZ6|+lw@Y3}Y@tU{gO`@D8-`-l849{NN$~9&ecH-Y{IQD?+19$=XFH2GE`6Wb+&u8_{rEnat&{meWj9`h!+` zZg+3zDB)s))bi=EwebOzeepR-2eQ{l%xIHw`sE12VEOa5m>k@F8>MnEDG!hMnHYj8 z_)HY>XkCch=X9xHMp?n>7G4=f&+R@7pD4f|8$p1C^~Z257|h!uW}>ka>$%7R92&8D zt*e-xLcWeWz*xmwEbR|}&I4uQpyT3pe}x0@)A>l>(A%x3y8NiC#jwWhIY5ZwzhJgu z!ZpF`TY6ynaR}tFm9)m!_zJxCY<1Q5P1HE7tsZRt8$^CcRTo1#bB*y8nYh+`)&~u4j+x%n*5F>Or&U zDh855>SI7`7!yCx%iuGOx`0J{cU5#mHDj z_+cs6dOq0+fQqQ5@&LGPt&L+U*~4Q2!sy(S`xvGkw8e`vVbd7<-r%*rG9Ch|zhYkX zksTr@^o_l`1&=XZW5j-t@+y(J;ciF_AZR9bATlZCdh7qda0|Qb2@b5UXmR0f>wcv)DrYh+> z*zcz6M)f%JEVgfgy1!NTdL7szvBRT_#Xualm}1hW8w?mZwq6Q3nOOzJM|MqwsoQfk z`vTnL%-NxfQ5&${Wz|PG+bJYD682Tit>gUf^ha|$&yhFb;Gj6;SwxX9?q`91Ktf$q zJAVZ$&5eoz#eD5M*Llcn+iU##9=2wC4E9oCc*$%W?nbiVH~-3WUj)|#naVQ}<6{Or zY{D2RNq5vTM^s@T;u@6!Wm94H@X}$;90KuX*suaO_ci~72tyNYu01A4J8$p_&Oue+ z>~ebJ?$?gQp4M+B)Pn5_&k2Rc??@m|>a$iv0s(Z%9DF;mZgLo$61-WC_f9vRnu6{% z8#!kyX)yLf;9c3RME7m(iPx_7n+qF?8D}8A9CWbcfz;A91JU?nG=bKx)paDMdrWsLNRJBr>GEavVncTy`R-Lw*#f81VX9MKkbA2ed!d9Ide& z1T#uDu*5xvgh_@{k|%E29{DH<58rEq3G5?w9^(%U)#FiLtcY%+Q4p5DOq(I-(Q7JV zo6WS~rTUVz?Rm(lhkBu*u`Ku~_X`KWJ7WoupGz2B-F)$Wn)k;rAxaGi23sd8qdIta zb|-w0Hk^0`IxtI$0;w(%DK5J~zWdJD7KmyQRNzeld9^)U0M^;Q#Xj3*Cc3N*cBx|R zYy@5+iuq#W?Io(Fx_m!JfT!FdDOR4iHWV z&(FxXvyB8y!zXyzB~B*q6jp?nXU81?kLzhWRsrfGTBSos(k zhqip>nNwXPI~3HR6qld=wdz~O9Gu5n&CC@L^y>Dd!y$$_FY;ByMU%)ekc9$}d6AJV zECUev<4UoOFT^hvf6JVBEZ-1Ae0=K#E*~ z4^7JsA*_-u6U9tZ4AM1h=S#6`-T46;8!4zd!WGSZuAo5O0kr(zEMdW&{~QwAMVQNJ z^d^v!kB(kOmV`e$z~yiJzxet2%fp|3>&7Nragq6Avw|D$A(d2?;tzl9d#2H?5d!}v4UXY)MZjfh;W zD3n=@0sPESI~7H?Rxf;L1_jq`z1vUTz6RfR^Fypx z+di4XfXswf1zX!2>m!6APrLJ!11U!Diz|vNHgN`)E!JNSFi*>d-i8DipN^OXzX)A> zJUZ;J!*`v)aGx$ujk&aEA{UrxY`VgeP@tp$juDkC1go-q%>YjxgprkLf2dGP+ueU7 z4aS`3R)77TT=R404P#IivlTIiuYY8<&Ntjv1mf%stA9@VJk;kw?F?ZOj(0BEKlZ~% zQUDmDs@|nG2`{vk?C;xfMNkx_{azmd-8Xr8cO^CA((+gMe6mOJIM9J@Zt*S|=6EQXz?f&Ied*k>Jc zTE`hHvqm#Meo~>5;|)EWT?0`Qm$6a zgh&a)F2xcp#}u8`WN2tWGi#SVZ=Y?jplBP$qjpxVruvZpN$a`r=?XMmamfL0ArO<< zSrzt&;>s>~NC>cYc{{r{Iy;0Whwuc3xp?@o`B^l5&M(_A{3|EctpLX;mMi875w*@QkP0jFsu4O?(^> z_X%zx8XlQaaTI-H4MSKEZo7{vsM%Zp9^vtyTk$)i zgJZ2QlYr^ZHmD9B0@rFKz1jhw^9}h!lN8Mb5F_(i7^A`;nKs=xg_`A2e3VF))d{*u z<-8NY=@>3`OeNN8iOvG)%NT}Z;PzTtsU1eHrnF8-A`Bb6k^{bda<&8{=;v-hMkf!aSnGU*aEW>bm^;r}t=`!Jn7lQo^_6rfztV z{=W@AJ9?r#&!qW*$()0uG>x4pK1HBY{c~~wUS}$q;`L(4C(0<>sSp`DeVbq4%IL`3 z2Z$Q@VT4IN0!C~It@%VJC^%{}PM;)i2N{q~Dis{1yPio;lF5Lpr=k!I!`7xIoUF|* zrg?07DjR#Q5oE4TS}X3nEX24kIRzfUv>->2%2ZBklmBoUlQp^eQa7$a_fZmif+9E${uJqO&9z$4UW*F9FeZ$l{VLv6}01bqk0X0(? 
z*IkFA${Up2uZ`6ESI&WxLP_UlIcJ z=SAidH+(jCahQ{hd3_-YKta1B`Ua3z8F6VtPXzLI^?eUp>~tIpqk2Gb`9W*_@3+>+ zlwcq<7}x(FE9Gyz7B#wdo4~~ zaQ4GG*@An^;5%=M)I5vFV+!m7(bPtQ%V78Gk!=@u&!=G%jtQbrH0`)w(+Wld+ah+} zDr)8i?8n8Q@>zO(3Fv=zokLpE6x|Lk4&kF`^Ch%^#jn!t?2E(q;Id4!4lbfTl}i`J z%>iY#iRh@#oh^B|Uu+GAQ07+UT9Ne>rr6`$FM#wfw2#F#fpH>TyvF90BY+c!AQI>d zb*x7<*#P^IHSpy*7KzxNvq!fmTb{QL95VTP8GA`O>|u^i!-K2;4w;WO}4UJhV7 zOmS{D+QG6>J=AIN>oyMEudmZ%Xc6pqHPa%1j2C}Ren~SH5}^zFTy&72z9HiA)%BNk zXtG}9C5LM;>+dpGh|F*X?ol(OQ}Gyk%g6db+4URLnik-@C*k|FSlxak5G@brnMox1 zf-)ZcZb{d}T72Tz-H=-GoDe;erkK-esG7w{UBO;7iP~Y00itTBE;-`@4Ul}vPk}<> z$B^U8;)sJl{b`8VgP@{2EvqFiv?_3WDni9s?978-+meM#J-Szq)O6h_|A-Fj*D)F| z*On7pmg>QfhNdL9vxxRcDDby6;wun>6Svc>i-k$*I!xvWY@S@{#jqbSe+uMSl?Ki( z&*z%k@gcZSSm}*tmGZpB2@X~^YyyN{E;?|2AKk8p`>9JGVc7^qYoSw%7t#s01;Fjh z76H`yZ&=T)zj>SlV(rNV@6tHi6dRHu1^!Uc!5izg@Iq)GnvBY!LA?jL2S&NsiRjzjO>H@0bg#R&8T zAR4lu(GE#caE7g0P~jIGqt;}1omT@kY15$_G7$!WVi%E%qW5~0bvNV zaqOT#bmA?4yI_hv#Qi^t)Eb#o+`4xEn92j_Me^mPPobE2r1tONI&RlohC^5hk6Ec= z`#lfC@b%z8=E~FvQJ)y79Q^43{05?@#>v&QqJGGL_g4kVAhxQsG34G@bMa@-pM@F+9*t6%GVA__4`uu(XMr8*_9o!jVC&O*?8fEj>|7hSy z3UO}-arldQ@~d`)6E2R?T95WED3WOXE8MvN2duw6W_vTtyALdI@}Fj|%hMw-CGBz+ z?+OXLQ~SX}6P=Zv?SKy`NWrS&#oSkyxr%FqObslPmJQPC9^+mJ>4|b2_skJU-1}PO zLf$dhcnT}rXkB$>K|36{ANO{Avr=_sf!T2txh^`W2| z^gID!F?+dbFF#TH>7&bB<}9fKn9qZfXPXVz^W)5(Z0kU=IY%-JT3wJhrc=}u5GwJY zDyB(Rt1__{k|3T)I{%4*B9Q#>RAtwD#6ohYa|tgQ$1c_9V_lBu%DZi8FTqC*ESkT5 zi_5$U5t}OP9T%L}Nw*{BVDkb%D^sgpdQ8my6pU#)D@P0J{T*RIMJ{=bvz~q~qaCHi zTEP%E4GlqUJIt+ z$3~$ai$VcvZGDi~W&NmyL`&vZenCPlY&Q0az~VY$E*nObySF}R zH>wIs-QM6_h?KZRUTUm#uF*9J1k=qAVxV4P3;0zI@d~Y~(l<;h2WCHxddp4Mmvouw z=3bEYQ=r0x-^blYH6P!wi!nuNdY4jk{vQ|jNqq~~gr$N>2Mt{dUg-ShuMOdixBsHiX z*^BBjGu!l+Oq{VI=j~b$q}q~p%xh#Gx=GzDUlEvXUgf3eIluPrl15@4zrtnw0@n^+ z-&3fFymH4;xNnZf<3YiDletDs8yc)vNAVYr(nNvt=rrR|kquqVU=!(CSlG731)kgV z(Z}e2dql!R6;*0_e|WsR@iUd0!OdnVhfY2;v?eQMjIKMT;RF40dgFGou|Gvc>w&HI z3K33kExb+XJg?BQ?Bd6XX3x(jUT@IprZEpMLv!`GrKcq55#+OoN~Clu@RT+BBZgy- z)Z7avAAS#7-U^Ypr+gFdMFbAMC&Ia!Yhd0l^pN#p?LytrUGMGYU5X+UY$$szZ6L=^ z>YIda16j~%kd)%Iw&GhIrXed*3ag)&e&t&Oxtf~*i0&2%Gfz2cl(IL5I04(Cw8Qgk zR=h4XpWVzgnpB{ytz8kj{-Yk9P3HjMAmC2w>krrzRv>zzI~iE(9~ba^V&Ldh>e-gj zgoMPiAv6_z>fJNz?~RTEW}#V5DDQOA4c&kf241uNqL1R2^rCpSNnYTsvei|U;H11M zvJ0x;&0M%Zhfc){=)a5Qx1PHU|35Ca(3mQccHr_8+d1N9>R0!WOg9*nPje;5`eQP* zLelX>=IR~aYCCtFL@Vri*^|(APM-*gHB~x>%I0bzb$x!gdGA@Twtu$K(902=bt4bu zLq@x)F>S-d@BdeZv_PAo%Kl&QG1oi}6-9#dc1HWTAFDRZT{;_6pmQHn$QmNt0=s9NG$qtw36tM6+Suisgqqj>nHQvv5oa)ObsU zum~bo|3na0$9iFt#Uh=h&6rSW3A}to9ON2gq5Y9Tj81XAhXG29Rr%#gOQ#?M+ym-F zWZdg|Qs2rEqa1E@$|Yz!uW&3Vll;?#SUYeY%UN2$rk$Nmr=HLAyi;W!Ll%Sgi9d{!m*KjHY7TSsm9qIt_k_gQan}=(i2vSy|cn}VF0=e4(H0d=&EPti?6H;ld*#?p(b-PZH3LakF3z54I0Lw zgcBt1Jsye`KZLuFqo(2j42M8biuX7oe&k=|KS{tKGW?c^dxa)6Q4Hx~sHMLDsT>y> zf7EZ~7RC?i6-MQqf%Kds?y{vB8AvU$&7iLO$UlaB(_aVj?N|13TcFSLU2HXHvLv@iCF5oC`#}*zf?&b%o90h*7o26!OpY3{Xa^ zGde}~DKnvIi~uSE!H8a~PWG`}`YgH-1WNl*>t|7mfQ{|3Z7qQm)Py_e8Z0-}>R%qI znMooj5{&2QP)l1g+*Mg?8MuD#EQYx8fr3qhyU<}PhEcDLv6U^!&IAl4xH&LmF0@|4 zeE(~iGRemZsD!JtPhNoy%o2KC-7{l4AKbOFivhB+G9Nlh-PxQ=#iQ>TIYI8%^a?)4)0NR;p=!-s{*TbOL>?aEeKx) zt1{Am9e^Uu^110ikT5HtjBDqjG7n=FM}OP*L|G;^^d z(w7uR+xZ8_IkmpaY7DM2Q};`+o1k0tVTm84)OJ&G}J}^k_tlS$cTGci=bG zB%d~`W>IjwT}|jr7UJA1+BIrM@xG{C&Eq2gqR^MTFYyQY)&HepB&NPqS0E1fmaOrL z2pNUx=^K6G{;dKL4>*RhAzTLrXw7z?YlL!Vm9tSV*rNg4ndhJuVk89Vd{a7Q)WD$K z_#&4y&pT?ilknk3^@WRIm&k8uVg65DjC38oI3s)Gi5n%CXY5fk+nk9QZNs~o?IwU< z{u8okH*+Ma5Z@XAy-#(MxXPf(7>~zBDDtL9O){h)B$HmXiASNsy^_@G=M%7Ag;Z6! 
z2&1vd5&Lb3)2bpcP{(?Vl!DvKb?64msv*2TpYDeXy3F3fBFzJ;DOhWYgr7k7k~s@0 zOrwgV=~S7!SXz);GDKGd!0b9hV$n|?vj^!P89)`FpIJYLu1~A)XhH>kb~nwvp9!!C z4*Tx~56MnjtJq=tb;P=>16Hf4`!TB;x{ow*01*R>I$by{Fk-E4g zDM10-Nrc=!5!cJrSS3i`71UhSx9Wiacz8b!uV&(nQ2VJErsTFM%N0k+`uuv{qttIYxI>g19HI3>chuM2HKXPHAty~{Vh-&09EZ0iMXkx zi^#XfJrUy)wPm%AC-!;fxR;p5LC7&!#LrY#&344%11t845VbVYNkXRujoSgFRBprC zi`}Hr1^fu@yDinnvLir6K5|Fm^u%18CVN2~d{TnP@Hi^n$hd`-91`TxMj&2onj17a z_CfXOT^?t>0dmL|xaNqqO}Lu-nL9gkX|$qhfOvItMDGyW&3`C z2kIlruj2Fd9JA+xigr7E`!aWOmxT`l^uqvW<}y~8B2qTW2>Z(-mNBQwoin_`sI~}~ zj{qx{*0A_&Y|wJ_sx$1sEP610pg&J`!_@`0EEHWNV71%7bo*EaA*)WgE@E?zTIE8} z#ZR?yg0T<9P3`tp^ljIN4eiyc(QxP=$;RGMuo)ZED~_l=3i4%wGn}AH0uTtiPD z^RuZV5~ZiAnQ2-p;4kr50rST9ZWrG@``dVkp{BWlCe6r`%!9Ph~`^ngWG z_;diy(Xh%`0ltNHq~%>yTSjX`26H=M zF`n7#rhQnYAq{U|1Y9WbWd>ZW0`?^s)WbBGP#fde66I4k1SCa6kWaCNBqa`H&>i;b zn2TVuH3b0YEMw%tn8B;G8AEz|P!(h<4CZIB2xDJc$4pPe%0s%ha(mRM&Vj}wr-d#G z!h1(=ZJ|%$XHg94qX(-JVZj5>L=WH~mv-}BN=rszkq!S+WJ7z9D!Kc+-^ z180fX&O&IPX(?HE{}z(Xcw*iZatxD|Ba%U->41=2@S8O1mvVrH-UNp`(Q(uBI;KSb zqp+ucGx9Xfg@^~{IETIP5)UR23e)VOF2}$C?b#Ks4JRaYv`$a63J-vHK%Rt~a)V59 z^B80Dc+l4^{?(i3^B4t}t*dyeArW@9&kS>n`Yeup1TUco1|-KRENQ2^^Q;IDPkM zc3t;1A|Rt=)A|^-8UlTKzUbTO@wI;eEeAh4GjOF6PqRQIb#_5wG{3 z*2TU3!6~W2G-qDIKeX81BH>-A5TL?XKGxnDJfuCplQq?d#69AI&6aM;60;Y(ZsrCN z{q}xw?;GEZ$-zTRzxxP_rG7qC_W3yti_4Kl7IJNy*jgyekbhDk3*&ZnwC+7vD;!Jc9?K#?T7W13qC zp|$esTR}A$m`o@yF;iuazQ^nM`t*8^n2uqQ+P>{(8{WZ7cKKK&=FwSmfeP@vuQ)c0 z;-4mkqulZk+>M1Xr!SaBc4LKLt6-^N59tAag;Qw6u+`+vtpRA3irVGppCGmTD)SP< zW=lpELKPUJ)`;c@7gYluE5j`yLl=}bv7I1r^nS7*EKU>Lmzy-W3CfIV3sN*+3~3Zn zR09VPP{>mIlC;MmsA{S-ntGW5}Q4X zI=^t#wp>Q$xdT!uI>6W#O5(Cj%HG)B<$9KAY2^T4;h$z!!tn?jgO%HELS4D5yGdb3 zxheDCJaiau*ZNh;ZkJiv_%VDo%S;y!w9tQbU=P@5N#68sn~M01R_9CmMJ{O3FB^dg zjFwHxe3d#o`=No0pN7sg)HQD31l6jq5|+<-k##7!qW+YmjC&ZKl2b%YCg~XMvShiX zb4_?(pnE?o*=0S|+@A<#+~Y{gkBGS|^NzwPX(5~$2A zKU z*LYSoZ_bRBahmld2!IUl9+fJ+@r3dvbnX}Gy>eP}pQ%0VN-NZ~2x(o7d@JMm^RB1E2z zNj^56W&$-|9GzTg!`u7&#uFj54}LA%)%Pm|8DsAaKHv^!NX>7YiF)pU|0wtEgv1o9ctsq})d*tMxXf&OR}jfIw! 
z3S~ltv(M_5CTR7yuCSNrsemm5bj4K8rOtIXB+H^cMI;APCBxc1b}a(v5xwPj(IT~O z1A3aWbM8s5<2dn+*?DlfsS(6S!Y%VJ zB%eNh6CBLs@S2L#nQA;om47@iQ#yORx5+r~UMnuZPP;e=e^npH)ijBpjqyJV9Zj*X zFF^DxhU@-(?<=>&8Fc_>*>CN@riYDwkiF)tz*v9$t> zPq~-DZK}0&J-}zr{FIpbLzrI~3C|2&S3EOG0TT!x$CBK7BKMVB=;v?AsK-aiXx*2~ z=@Da#>QOR=g)`oNQ=(PutPbx*=W51FR5oMIugHM@DLLv@YJ`}es?Gg=M5r~TIAruJ zb8M0f5>!Z1k;=QRhK-yk&`o3N)d&at3Vx(QML!LqjiZ67jA+P}YwmF*hQvnr3N^J5 zsuj|{{zs5q@j|1@Fj|U$;k9>Tj7|A0;~r`ouk%R*+l_|)+KvCgKQXV z+}7iPbivYnz)=@teZ%5z#{<`=8S4xhXF~YRn}F9tamQH#%I>R|+_A=PX`qsoHF0P= zn(5=ZbU(G1+Ylpvm*3<~vpMTz>{IZN-}7MWJCKjzHfdz<6yGz%Gq-guhLFG|Hhzhp zTqVn=()5SMXsWt>mgnYgF2`KPwQ=Xed?&uIrW~uGHrxm!B;x0h$fXbN{~6{h<|Qvq z&3;b;QpS@%xVpSMcsiz@jD$R;7e|h;E+%GmzYVJc+0qwbDt)J7^XR#sPsb#VR)x#_ z)?U`O`#ECj@A|(~rFw*SNhXt{)l#Uwew0^$dN|QZZFvQf=lauHjYXAVJiC?9Yi0cv zheF>!mNw!td%a-zjFrt_@1_hxKpsCo8EkN~uK!3&zMqn;fOpta->{M=c^?456pi7e z#*N=Jeqva4e$t7zd2d}Wr^O+KD7vfL-V(HYNSua(Nb6mCC#X6(KEaxd=o6JGH+aE4 zgo1l)30UwQIiel%oPI6YP!QJ>f^#?$8?&10R+xOfxr-Fonw)lpf=HT3-5ovqeE%I?BpN{ z9{ia8=$U8nF&{y86tGd|*+61`(9Pg#Px)1j@mpd_`79P*<{8sDAjy)lv~EZM-rNro z0rqW|_`_0Dr)(kyX} z$3rBe=%eyv!8*_?j1P4-R8n24^zV(ZYJueuXACfa7Xz&=%+ap#P{C<4)?>{O_5%(O zf3`)5j`T4ktW;wPq#9TnCMys+0Y4iIDXp#4c!J8UisEM7sHs4UF*$OQ)!A*a<&_Gm#@lhNiIlJ5>+t)Fhd@8-_C%r7D+ z7NMDza$xk07E=P7%3wV_hCCZpVQzCV`Uxg?DlXXgCeo`s-_oJsqA9<1Ss$*=zwY_a2D@&l8UY%w#PTM|F`#+DFCZ!(g-{@0Vjo%PD zT&#HUxQJpbQ)U#f3)4Zq7F z7ID!`uJqdF=ZE)G`<-KabIlg5%-cs0Sf@>kjK)_=+# zf-T{>yn!&pfmklxR%+G22bdYSG^cxcxwM}$51p@JQ>^t@)JTCImeju$dEeTOQ@bZ=`hv;vD%Ic6-*Us^77JJ%>O2KRd*4ym&Qz z?BCwGT^eLuF7xUc>wceK|A2El&gr&2n>c7YYB9I`V(9qZoe{mL4j->lxvq$V0#E)n z;cg;=Ly1RiSr(@!dg!}|RXPV+_9q`NY&3OcX+TXs$GDeE+h7m$A6ipeb6{Ec6|zd! z3n!lVxNolP$_9$Ehqbn#J?7%KoU=N=eYfzP-Z;x4B>K^TFFx2;KzfvC)hh#ozxj(% zuP$tU@jW~6*u+27%X;!VB=2V3uL*sufN0-%v2cId z#p~iwr_7XRuukpc!m^87G>U;J_56pNpqp>iH+@`kRReyV=LZtMFNAvn^TReOXG_gq z|4p0IF6;D?3$1_u`??i|`)aRmdYv!JhChAxcJeCT3oz#b9c)QiI!WPQ09#xClj9Pr zm_5;hA{se2XfxU`b&@p^ig-B`ghUo>mv)#P@Z`Z8^HGx6nz0vNPbpF-ocZuNv`HR* zZ~7W}ey07isAgDxBCAw+Hle538fYR+;tcdYy<3N@lp7eQ0)xW@Ib z+KPOVTl48Js2%7t#m>ZDJi|+-kY_k0`i{jAyW4ujaXh38;~M|&qk83?*uCgO6D*gK z!ekIB`HaB10~go$%@zu7M!Ce{YC5X~Bk<;+WG;4u+FQeIY34|jm-EyhpOtmox< z=%zCc1RZ^3*aw2l8>S{nx@}b1QvzJ3PJd`hei7?bRH}faj6>LGIsk6CwehT`HYr-) z?*2S6wj}Ty*bBJwI9&i@lWVqx14kSO|8@Pc_&8n&i9NmsE(^RcWEXJMC>e--F^SZzo26G0_|dmg=57dY74&J9|aZp{Cp|^3)*o0QugXN%Ql&pk4OrwV`eI{ z!kWxNlBV}U8-Wlv7y`}kRw0+aiU66RM(wU~G1`C>qHTlrE!T8$?xo{1!06Cg7D>c^y zL8uBlrkfpRH+!v{jTs>hN!Fy;*DePBl>@}o=2zTw3;UK}sMdD#uzvOKrUugXQS@^X zx1%jn>^Rv<@Be@c#`70`s^#Yw@>s@`fdSjeCQ8I{-ZrK@IFC3AD$f#~AKK3QJ1qHC z%zk$eOw=;wmbBp)3=jaNmW@}4;wJwxRzvqrC5cB>nJd9tj9J*#;@Ic2u$2UkHl~Y& zSykm*VK^Kl;bvx4uV~!jcFoA<=gYUGmRNQ;z`3Q1TbcD!!~6rF7$s8l@tWiBYDZ#s z6Oq_6SIy7263J?Ak%-e;TDk7wrSOMp{<5z>U%17)68*DvNy033oQMPg zp`r!n)sD6CrJ}%8+TUxH>hjP##jY`=n~~x{mZqk%Wvvd?!Y}{Ul09CEBw`K5@S+#- z%2MNHyB_BgQmkNxq{H5+Aq@656PNYSYvmPOMgAexbBvlsi90ycMk+XrFvpmU#L^wlB7 ztzR(>8K04697y(xtU{xku0mI5Y-~#o0qD72)I*)f^_+Yx#)_||Z9HCNd!%MWgwGG+ zY(dd-v(DAT>8HJg=}MxhPEW9G0mhqh)0rf1`wA<%oclGG!4NSPHc39kT>Mchh^WfG zCR06mGtumlpp8}DX<1ilZ*}AQZN~*W4*_~Ibzp1n{wCuV#z2U5BA?g*3m+Lr^b~$v z#`&pu@=)z7#oh*rn^%K5FmblmOg8CM0d7e#9e(VyRzO>lB8N0I#*A)fR}0B9;l)+f z9LFZ-GeUD&VulNO_(6<|pbOsa* zlng`P_;i!LjHx2YotIXoRh5ePN7T%Rb~2~{$M*LejIJQ<%nA=uy65QQ2GHUJ>T;@) zZw8j@if2U=Hw-W$<|YlA;W(Ln3Ia8`H2h_nWhW|TSg1<3 z3*DY#V9=<-!-5x33zt5CKWpPHr92I^sZ{AwwjKWz^_P{0GQuJKby)^sfy{i3CtbqJ zi_Z2(H+-?8-t0K$4$v97uQ39HqSHGjE&CoOS1Mr;>bX$3SI}j2e%p zzU&=JvLc_4bn2)_-?`6)3_ zW8u5t7WWB_?uMS*85XeIdO239B5q6!zg#-IdDOoOoUQrFwJEJa z(zJ(Tf8OL4`RgN+sU#*BO1rM{xVb&=;zCXddYlVK39R&HdY_p>-(Q{3>%xSfol-S^ 
zmD+g#IGCS@*m|7G`y{^Wy_1(`MSmnaJ6c zGvdAf^bzC%m8Hk~Hf9X_@msN4x3jaFr!MjJYy!w0QmHvLHGHeVVK?-O|=lh!~XN7mi5d48@Ozp5zQ^U(D!N?F zYjGg~_O)zLOju#U&6MSm)o0cJeC$1+&i&n}jznTrnJP}}9n13_t!r0%yA`p`nfY<_ z&eh{aH5G{6oG1NJ@XSO)Xa5S*{d&@plCJD|q8+SVw>h&a%3tOccF%i5q~*&NN=MV- z3fu5*c)zy@Hpv(N1TnK?joM!wl_e=y^8XI=KfV9;3|%8z&SpK>Q#$|Nr_1{zZlQqA zl@N#SwQ#WAev6CLL`$;i-qBuO6snS;zl3d`9(92_LYol`)@JgjiK2V|>$w9mNtzI}#S~0T+IOl)kqmn6J=H;TY+1I3hEa z$k*7`E$;WPPk>f%9?Y9*ryMW%SRm|-NKAY4V{})^kMwbpSJ>#Oc1v#ZbETy6CGEofIB!jZgO*bb+6JVwOIeO3Y!@R zfk(Z>NW4b9BKB(i;pKZpmkTd@$cMo${yJ095U$nU0S%Fx4K(2}i2XE65}eH^FthD! zr5Y$W*v35j{88=VkPA>p$^4+(rX-ctEm2Iwxg&78C6#NXD8jQX_FWln3WQP75T`3( zKESfrdVKZxVhAnncU9pD$d^2~-TOzBMx+B!U!nc+*xG!-{XG9Y>kkYP(%y6GSs!l<+2=D;d<^uQ~P z%giXT*a1bR{cp-=$ke>TlP7~NET~aM6=FQixr3Pklp{fK`o2W06~%qd5h3#kz5%{* zpaj0UL{oEFm=lDm!T+U2Qgaz@7Eb3T63BWAHykXqq^eJK1?BSXB=sl8t?4VPt-&yt z7K_ZYI1}eOns^E$<1{(s{0Re5Ogj&8TMMlVA!Mg$VQK>&!atQ3YCb)Oj`(YvW9UYG zRRE#J~|pnb6FNIPYP2@TKQ26 z8=q#cGbyTY2DO-)f~DHONU@r(&ySjJ4&Eaf#qDb>>}C~9(DaN&XPsf&6j;?Q1%2Z)Yjnaq7>c?d{yLgpJ1(UtL zDTjq*;VbiRx)+zctNw}BC54*wDciWb`pe|-Miq%P&hn4wRm(zXT|}I*uo(K+MDdR` z_*BuF%~grd5aEC&3}~GHW1Q^m!IhvtNDsoQ!t4H`67iAh%Sa3{F26N7oJu=S=TX~b z-nJS{S0TPDg8%F9kfK~vt4zbB12*4$ElYb=m4lzgW%DqrEz=X}o?UFoa`3;nL3fki z7uza>At6qr?Tb;WcLg=xdyzn6x|8u5SwXL(6Ffy2pPi`A0g@w@mB7W z^8hBt&F=al&`v(7&EY&K`I!#hm=Vn??FrB~!kU!PHC2Qy?d~#HDUAFA%thkcrGBcK zUlVM<{>{gJ$qgJ9L?I(BBk@U!iiG#|+HV^;fp5YBgb-fGlgmUqXdv&%INRKl$@eMA zjA(@81l#F6RVqDK&u72ZZB7VGtgvEPk$CNJC zkKf#V%0FQC{-tDHY1@*oZ@)YtDZ?v^QK zUa`Khsd&=-D24nhg^a|GUe_Oi)O_d+*M|CejS zNW%_kZ`~E1TM>q}H*ZfgsCHRrIDsZPa_3ftgDk>d{CiLC_=E}ksLJik9hGb0GCNpJ zq)z4Q1mr7>g+{@UlysI&B{ue}8SO0#(HPyY4K-|#P64!q4eDkb;sXHRpLMZF8Wz-8 z)-v!i9PLkL@)QY7UbqbD4gVr<4^R?6FpXW~?%|lO4-1Vl)sqf~dI<+=zI}3JepBZ- zF`Y+A|A)2SyuC??YNG^#DRa(FvE@iEjlO)~A$mf4b;cYt&fZA#rckGXTH~sBXt53| zdJ3};)60O}h)$eohR&Bb5yJ;7?gYd-H46#jmSj)|ju2f(2QRB)N2-TzDq$0XL_4Qo z2!Dzb8<%(!kb%o(9u0>A8sR?nILBY0A3F_)Y6shiLa(f|ezYyYt#iJFTnY-j1l5~f zAsFQSPq|tqte`h4x|=bttG3<3P@XcV&n2Asftbc`iTF>ue*33B$&kDP?O-2F0RoHO zjwlB>vP`AI0FAHby%(|T@=7Vd+01K=ACD&*^&{WH6HTXmmgv+%dsgQhf;jXz;pN?u zMi&7DA)?Wo8?_bxf#yir6CYYOv+yE>I4vov^ms~Q>JXB__)wK$3cqyXW(x=X9cg_e znZJG$W=Kb3!)5fC`#-4thYq(^aC5+#wt+u(oY%F@O)NfU(=_(i>y2tAmYgZvZ2sMJ zQxgJ>*V!caC^1!{RaN$4|AsZ$8Nti0!T`Kby@Tkf592+93})*UNT_Xg!#BzV{> zs>Fd=M?!fDtyTvD?A?W51d~lC0ru;()}UI7bGXdU4N!+cuhBsT0r4OUar1+thVFY# zOoVjlowmVR2-3irVnm2r0^UiEm!Bb`bpw$3`RZeD5b_ z_r5+&TXXkpn95i6LshZeXs;#(JxHFAmbU(RmWu=T!gPFk9z#eD;^fFOkNAS-skyXs zI*()#bz99lr!nnq5r(FI=vO{QyAq#VR(K^ePTPYOlK&?;K}H*YYv^qtJ7aQQXD_vOgR2bGGZmo3xQ{oE4IRiqHA0Fn>d~1E)HgqPLrB%Lp0G`WA2!d-PlB-^!N?*Xq9p z z9_=fQ87AKtv={|8Fz)h?cjL0n^!nW;HU}h_DI=c=ayqXd?srxe_dBN=7a`(VOP$({ z|5d=`&LAJV;Foobyszo|-us=Rr{o%m6$g9ufdQGtJ$ZKl?{n)a!j?c~SK913YGX8p zSkbC{yy0pG@v=L&l$3Q4)qd8`? 
zzXp8}#T~4vgb=1DuS9UdE0^XoZQ<9$$q^1p$>HOP*FgZ9pEcuu%Lc?~WG3~$xSGyk zYu|OvvG5E^KB-+Ziy8qDgzN z30UN*$BW2`txk{xqNlB$^foY8yU`sRr?C`TQ}RgnO>-*kN+_bmyUd=w&R!N@t$hiq z_4N@Ro;Ibt>0#_HU0ZMo%*}V~b3^g?w2adadToBD#WC82zZ7-z>-|7%y9RFm%p-&T z@_p2MH~4|_I7X~#r&Zf(xTkQn_Kx@Gi2!Q&aBS(CThj|Cfyg7ncjX^Al&8Xg9)};< zuyM8$94Xr}X3LsLL*oh9ZS331nY)D1NmVSKZti!}m3rf~cHv z+2@7snN7lY88e(y1NbpNS@mjVT({-t8VnjyFIHSS9@@?6Nx{Fwy+CfNZ7P|(KrTGq zn8~@Rp_8qd>XK*Zcb@;x`6KLre$!i?nP2;L;tfye-RyTZU!K;-T0CHT-dv!ZOZEI8 z&e*$!ukQU>cCgoR4&p?HuV}GkBALUXPCki5!|ZV)(j-;h*#9v_;oP-%3v`uH?`ZF8 zoIs>y8Of*x)ub<4{~mfW&$XIRRgnu4y%Ku3FhIb5>kMKShkJ{36*!CbPg198v58w(p zck+Dl&>@t2x_}GeV9e|eW2%S0u}~oK^(Jjsnb_E{*Y5hwDAGXf;^0j1zE}RCAZ}DM zz!>)UD;0Pa%k6Df?ZK0?Wh>)X*W4ci*^Qnb6l|-gI(WzLu%>5KESBc>VmYHVh<;|x zz@L5Bi!2g_%(r5UT2f*3{meA_mp3ENK{K`N#ep9kBQ2|a^F6dLm9nLxXD^P8EeTS; z|A;Uc`0~E`{uXWaA5(f$eNKVx!Tu9V$^ZLPUv&QahmV}3>z+%?2Iw9+fs4qmRykwO{c<9-g!E3hz7vRuI;&(oL5h$Y?!Jqdu+8grmH#2qSIqCzij4HdHLdx6FA+OMs){0jWA zI@N9G=Aedz*a3UGu*!~;c`F)UcZNp)ZL3?`c3Zy}R;$5wp%GB!O+ocXDEHJo@EBd3 zhX%LvtX(%fBh10Ze8tU6+*Eu<#L@*TF+&i+mcka>RNWOqKX;?bo#QYFke}tyO~U2g zGkD(B!evyc{oFPpIcR#n^7U~`raAPv)Pz2ZtORF2CwLGxCnaY3P@N9Ze1c<~XW-eJ z_83k{K-M9X6NwY}2m|Fycw{t{h$X3DLz4+L?RCA%!G-c;VN(D^Nz+ZR+#<+(ii%K; zy48U%ua8^3)JsS7p=Ha$VdeUw@sm6sao;$8GvOA5-uv-iB#Pw3Rgr~UsatXG5Fvq% zyW9W{dQj2hNNDbo!@(ARQqj&GNC>uTfGLv31C`%+juOlcv_5;ADy^yuJ?KZ@-wZn9 zl~;1Ry%%G{(twOyD=+dT)<$;9bSO0PJ1aQZeZAxH@>sZ|GcP6mK`Im9llfkH9?+xd zP*zbdvF|Jq38oLHY}254z**UP?H0-F=yn9aq?f(A0H~&7~xaY z#fA@aMY>2D8^x;$K6Ov#(?xU!@MKlX^$g z|F;V>`_3D4>Q8wQrEu1gO^;$FW@ElFaw}c`ff8y}GSTIktsmxwz4mw?lmw5T44Oov zN1`?0Pw00N^WUpdRns$dZ@L|-)T*Ty_&BwO5#0d&gW6wu3d)q5rvmxCx^YW!k0e>SG}ur`VS2QIObeOPl(QCHlPzpjNa3?;z8mJl}n#JMV>%FTX!q z$^E42YKmxFW0;V67syYH{8)GPlt3Si!*Wma!dA8{G+w?Tst~u|m(*C)GNr7$U)E>C z;O@~`Dpfa2O9cq4@yN_(7zd4$LzQ19kuUH2Z8&B%Kdhg&y?~36f1YO*_0KpjP1Rk! 
zyW;)K{Qd+}n6o?jZ!`=87bC&%hwDEb#!qGqW4_a!rq|*Om&#!Z* zMm%)K{&+W{c=8Sf@L*Ei-d{SA_~W_MFcr>B;D6buSHFn*bML-$s8bIA(SPIpvO$-l z%T5m$t@y1s`s zJ41-X^uu@Rr9T-RZLEWMf+^IQrI(kfAgY?&XI?}k_$MhzAR4@3 zf=Z-8ECA0ZrXHFb!mV^UD+8_q{7rXB8Q?WU7q`IJ3Lxp>rlY8B)(#f-Tq`sP4L?d# zCSrqSDrxSuB;!X@n5Ms&c?&WGZ6efdUtI@A|MQ!%{M>OXPPn#%V&#~aNo0rJ^!l`M zCG?KNgGwDXX-n9%3uB;#lb(Xag%IiyHktHul~@3(Qax&Z1PhdJwq37#tiT`vB}She zzBvo@jo&|IDhF$q z1S|W*W@=f|e!6b)$!zmIQzuk|zWcl)^5#B`)`)}msW3sl@mO*5W<&D3MG(v5t6vr; zsam>*#bDXF%kfm-P9z44=*ZJm7w5Vc@jbTu17K`~oN5&k^$SL%KChJ1hq=yk>FH&q z0FeqzSQLO&K_55;6ThppA05({W*^_@k-FuFdYa!`x{qxmB3`aX8lMpnpK_!5-#ry@ z9)G0%?Z5Yw_p64!u3Sf~M{+&3o=Yw8(jpox}GV!zIQbg=QMF?$N( zZiZ=T^mOw^&1DcX*evqRLXbX+?%W!?{-4P{Gw_6OQ{2>jdQhjl3mM(OihNFN+HC__ zPk}`hhHR>XkE2n!XxI^P%g_O?;HIXu0_Q7W4(@M-Unv-7f|}0=n=#T4RCKs0<{)U- z3$Nd6YUeh#fpCC~3Sv2mHfjR1;&&o3*1bgJYFYb->XY4gY=We113B1QjU@^Xb&f%0 zsBMTtth|6NQh!+Ac zMrW_&dQUX7N9`|cs)`5Co@@w>4AXZY z8Y|01d$u92#g4v`X6UlAaMW||@Q#}4TG{we%#U~&*~`b}=%gzO%V3DWwY$dUQq8<7 zm6USxL!M!O1PAM4D=PQnt?TLMh9H)qj!1X&@NiZ;yIKU&$ZLc%_xB-TmXrlM8Aj(4 zu8U))oof6H(mqUSE*`AibRZo~Bt+v`7t6)Y3Av{V%2Crn`?!3I+e9k?bX?CWZ!8=n zHRJM~SmIkaO^=r%5W#s%+Kw}RaPEAKDqJ^;f!pTyiGhm=rOPot5Cu+SeVML-h+D)~ z4wm9RMnlc7aih<@EuhTu;%VJ2!G%HYeTCSwxFtx>Ohbdg|0z8S%;t7(ttjS*e(y8} zph&%U%NySz0_QEY7kTa#enD(a%`AqQ$^QE5K{Bj6~Lp`Os_c-o&GPf zf*79Sjhc1Jf9{F105%AB*11|%#Rclr*!fvzQ+A$q`zq7=EZcW5GjNMk(dL%>J9Wn-~N|tsWyGjM}Q{Z?@!d@&A&Xx6KkHa)fo=f4trD8lKPW?UfLruZE*uC>Z zT+g7@?XL@I?2HS_;cfrjjoo}Mt+kcY@+2%Q<}|Str~)Gx!aPo@th|B-Y^A}TXj9jV&VM@=uT;=@*w`A zlMz)AFAm1?8%+#jsJl5ZJ6~?bH?95Ve)aGdf&BZ%Nf^&?A@}+!^}Ri**e?MFA9|wb zX9~67u2$*%pedj+XkC<`R*`fkWeav5GLwjwIbdvwd=XE?TTe%_fyKb>` zFHBZ``yrXJNJIKp-32|P*mmkR^qZOAbZMU9B>z=r%h7MpCjGM3CqvNp$E1>f!c7PZKr%X4HOjXPShDb)XsKV$1FQjyURq>6w^@F zI;S6@Ms3XqX=>)V@3NispesvjiFTHldeWs8g@tBTK9^bN_vZdcJ2JH{>;d6m#wfKwxHGiCf(rV4?$m@a;I7(WFg(!ow}f~qQ0q9UHrJqXKIlJ*>O zRGG^)JcR1Zi~ z4*6*|9L)7C&)em}hujI(}bQxxKwO|pV+ti0*Z z3;lvd;PO~rTtYF&#Qh5GhFO##aYDqZ8xe;dNa)8GK2bvr2fA98$F3ehW|DhqSQbO3RX2>>HtS=M;gdCTL-Kt%;PP?})lV zEEZ)arLqW6Yy`(}8f~u7o&!WR5M`#0^ z3gSnXxGM#Tdl@j^nSGPc3thze7H@e+K|ECeQ@;p=%@E<=xcfW1cRNV>wfV5wndm+5 zCTVY;1h5m)!|5_G0FAkq2y31~@a~jQ+!^p#1>kT30&81^5e>6Mimm|k5LatPphQ^Y`;NEOr%D+0^>Aj91HW2&H;QY^7n z6}O9rO}n@*b_G`LV}ORT6>dJ@99CxZBt!T9;;FU_ML|=BK>+C9(JtNlt-!IO>0jON79Os^{t5>EC1< zT^%R~uMd53;K)`8LRyv6M%T}y+Bi1Wttck-TWST#i#RU}4Ck+&fF{DBC7HMuHD!@`;s^-3hzW)c&OyTl;dD)N3u|yfKV_sud1X7K z3pmg1lG}(50W_G+4&bwAq%?FWO0Q2EwK4&k0S|Y%>esHHmi#@T1uSt>V*ZO4QCY;kPo`o6Mr~<@I_*nq z`BNPKpsY-?V>UtC^eupZk3ChnFYX-E*P$H_>mko9WglEs${IVQR*7p1DSw+b2jUl{ zH;oo9^8*9s<7e2N40>MYA&T!p-V|gIiS$4hQ-a}~e(-Il0?a>3$6NrJX?YP9w+Tap zW6tPX-$A_Vc$_ilO9?8YuuR;&8&^j@Y<14e zbTv9IRs6a|CiL!P`ibc9NI%lnn&0XIKrk{!tNm$~w?FKyiP>SW8y04P_x&si|GUhfBT08FC0-}}I73HIHJP77k>>>-N<>|KxQrQ#64MIqa~(K)CCy-ud@J6QM7wn$%Nk#!+mO5=GQ zBr6k|IlHSZS_nlug0!I%(P&Pm1_3F1(g~9+>f^liwbcTJygE_kSrvYekn#tP@$ic# zRKsyagpKb2;1~Qkd*Lv%ZhQoz*ib4+u5cX#{g~X-(!9H}fr5?}7R3 z)Qx3Q?3U)z^X@&RSj9t>pj*@jC@H7ln&SkL<`Wf~BKPT9yrakt}=inIx5$08MWA^Btt zyX6M)p5pq7qnTuJ{Fh#}0GpEvo9H;Y+*k4jiYf85>;-H4mYxVEWsVodkHx|Yo&6sRn zSsa@R9Jk2eo(tx*8X@4kxwe{P_f5*z_=qC|3 zLeptLFYd;LA6)8H3lp9@w37wh5^*RFhktnZ#-#la?Q3fV!xao-c?-aUnejPaRG98^ z{k_Rx@{f5%+^4ct{&%a@dQgV_2g@~XyVkei3n+Kww5_!1uVV6Em&X9(r? 
z-v@PfRqy@tr}2NtJ7>Ftnk@wjBn&o-Po{3scA2$>nBEsFr)SsiPwJyK;-37t8A=YM z$f0~L60}fH`&qO<@a^C9qr4V2S2(&(@TPGP4E4OWq)ZpfYwgMjPz^hytT*jg}CJO3-UFA#@K=eY}-5ntz{p};c6jq*SR`;w8>r#RU@n52kO z1HRA7n-V_gj{bSCC3A`ci6mpcSRt7=$ucWr-Q~@ui|8FL`cS8;hcAV~;G$7LQrN@V z^r~_k#uA}LZfc@sWNqklpf@^pUF2Pi#Y2%P#^}L1VZ2zI#r!$bcmXvo`UW6 zJ^@#mpe+Q2)5KmL7kod{mGc&;%ooc)xOh5vkmKNDXF@^g{*|Fs_fg~BJOOX_Yx{^6 z(SA37EV^N6%UCirliJTawn18Gl?vTQ#&$U!j7VaOn&*rnFx3t zJ5l94E@I*s^*oLPjoH0#XG632tsPu>wzq~;mKg-{LcoX~#D`nK;~{hReuh?CUxIxH z3|Z*8bEXef@alQEw<#*?cPsBg9y-c`IivNpn(xj)sF&I*6gX zExVsoWFdx$A2vxYMTJsRFrNC)H5C@3ZH}^0@}Dc3gYOTdViRx)9o%$q$G!X)I@lEH z<#TnkOIua%Qc-k`olQcyZ)-fOnPkGS9b!`z$BPC!|Jyng% z9{*9^3eh=_FJ^$M`I9uB%ytr5m_9*I$7CloeW*64s3p$+Q6fWM)x|gyROBYRWV{MF z;RO|py?fCwCQ;^wKw5V<+#8xSIm6p;h6xh_Fan6dmq{2%)`YJRlh7IR|8rjht*sWv zYUDF*=-3^VM{UDMmkuZzjbk+&egNQi^zQf2%uD1EoK&pXL-vMj!fE|sG{)7wQ4dlH z$n95%U3gY_3TXc+N`S`m+{5jKO%M2tpb&X(luB` z4@J~W&j63$+@1u4{i1ig=9vrn2MP@y1Ym$M&Vq|xS&z2Y5P_%ye@JSV~f zzFX4vZrDgE72Nd?*;@X0yBStzc2yamyinn&X4Y-pKsmsAmAiYc`DA*GArE(=BaPOj zH`-JHu~%DrU}QM|*jXXQ^O}D$Se`!^eOs1?hmc)oB7{Z_wvi6|tlb7A;0bIn-1b>i z5^-bMysW)TA=X%OHefg^zOKy*>-TW8GLxvHUf@V8#%FVacL+Eo3{kGpi69f?in7Zx zxYNTNfz@cJazf(e2PQwBqksHMcd?eMkO_sHZ9w3bWvU;yKP^1vRAQ-T{fCI)f^qxL zQL|&S-Iss5#In4>X?XdM)fUjI6mfT>!

zQtYpippmBRim78Jl+?mx*U|y}5+G>tGI@_sv$w4xdCbmT&6Y9hZsQ=KCMXJnH$GiV z;e-+#{ytzivQqQ8VmI*`vOxP^qGWmvEdE^d+Atx^x>R+0O@qJV^hE`O`yG&xjx@t% zh`U=4Y>tRBns>CTszm>pXSWGKxs4;Z8T>&r!!PP({s;NF9uIoF$a)k-XNSK&BA1Lx z7Jen$aO`ne^y#JWXUU*fqJOYa_W82$PE8W)62|-YIt&QY@Y4}U;deuR0@Jxn%?8AS zJBj~IJND|480Or!bs5lto@~7+h5R4%pd50GcC;OPVdr4`v*`PsPF3jb8CUi1f2XT< zt0N~4CmZkz*P?{`fbm0oa0?GNoVhCUqePkGh%{p`qz%|7w+OobJgsgB3`YLkt?15@ z8>ml`n*?BDWVm=f+fG;*2xnj~z5UJ^X&##GxLvC4Ns)5QdYo&aLiA2X6jhB%=S0ml z3K|_1rO?@Ao}VFzJA_*rOBy6rvN;rgq9~lBJf{Y<{gq%&OH|9_8-@!6=NnLyztf-q zwatBmyt3NFaN>xD-QimZpsIulTsv&)Mtj$5_1JAWnTM=X33Fh-Iv+x(7(XK<&_b-Z z;tJ^;kHX~#aZ89+>f~gI6G_k^`48~v^*`W!(f2-;v3E+K4l`}^L0r z!{Cpo*WyG~meH1GEwdvn1VaIq_bAru5S^WViJwIP zKO%gZYVYVW_c+*Sq2R)A>Nwj0sf&3@l%UWk_5BpG`iKoDBGB9Yk0o>}`CB=b+)MBo z4iLyn;Mi4#@ewtfDBK+w+N0viDy*Xw=e!9W)w|qg^Wo|XHu;yq+a3<&()-;c@HZQ3 zc07&Svi)k8;6M;k@*!z}tjbXcwKh#ynE;uj~NtVRMwo9 z&~)YN@2tL#QpC>pTh!Cq`c*dqaRoQ{F+07GIbJGC2c7nKvG8!hChVmRcxWSc(Di4Q zgy*SCXk0Zj#4MK$hQ1Cq!;L-Vh9P$L53cISehEu1>RUXlqeAA2grynzkA9we^m_6z z=BTbdmXWL1J)HqE;s#0&ggJIF}HIJ7;BNr)dMQ|eQ$-X2iQIE zd)ig+v%?;Fg&6|{rapLY^h$gYVV%aFt^Ez>$4-%{&J+wU%gpLuU6y#1LQo}VJ;T)# zN>~+ZWL|~pqJal@jfm8pF^RHtJ3qmk;NIgI6;B|g<4X4;-RGKbYSZA`s!5C{G; zh)MrLZo%unjuh1&G3vdUk$W(R%B1}x&6&UfIjI50K&rGbQ&>K7Lo+0ip z527pKy{-BLMXGz4p6q?e6I*4A8GzMmcr=%V7A@&@zT$_qmX9Kcg4(mN34Po;Z1sJO zW=c6<&G$bO8gx!RZ%j{rqHyY?!t;^vqZq*MyJq}cqQ8}h$uM**P@AUq%m{}WfpkvU zar38Mk@m%GBzq5MuBJx6OL|ct6mvq?D>rB<#Mr_&D=#xSu;{M?Oe%kHWV|+%(R()4 zp~c<68}LLs3|A7!T~yIpncLg5^?d>u;N4y3L)sX{c|6a1`uf3hPD0LfT>umb#dfrQ zoxR)XCM{NLr2^1*^%w&R87V?8^QZcg@yv@BHt@Q9!Y#d%IOKMK%+1t1u1t2A&OV(j z7n7Odh>KXs5*(tCa9aMb>NeRKq`J09o3}yYrU4fK$c4ddTKY%h8&}=JC8|J2fq=|>~yfwZH>(%&mxP(Lxl4pe*WhlElWC+dcuByhnPyI_(O?`Ly?lSG=@cm@dm8cW9@ zuB`Td5Uq>xqtcAgC?r7fqE@7qMNcAL`Y$b30m($p9mat87}`H7m@U|)*q&_^i4K=| zN3v@piF~?8j1ENF7Wq;{aY~uD$HSmg3Lh=xy?^&CLDF#CJyDe$X5f!UR%eY-9y9On zr$ZW>ftjJqT9w@j(R=78>94x1S!67c(i^uIx-L`A#Y@ ziHTqBN9jhcMmJKz$%o&CbQU`6UC@1|=&^e8yN@+!Lc)n%l#0-|`GpKm%U=zsg;syH zZjgC#r-O=OEAYArC2g4_p&U>OOXP(m0P237^6uYv`}?BmcyIepWv^F&NKhH?0X6K$ z98B@|d*=jCR2vy>fxRTuV_}31oh>R=$avA;N#|Ua@=mEJE|AiGa~rneazRQwFOmwJ zHaCqgakpQ7<#yN6^TP(GyEQmZq!bPv5xw$_r{bBe8qHK(ULbwuqd$B|qU;X8qJ*g? 
z-y}3~PI$d~9fpbY0{2(QZ0kfT6U}qBF!e;K5El6M1p_UX5_ivy6HQe8kG{9Tq zc-Dd4Tu?wpEwb4i^*pO14QkbtF9$1tn?Ae&uv z|G4M>AVqh3nFq1s$frRc3d8q8I8GfaHAs0pxF0lsEe&coam20&vAhJ=%o#BhNU0+v z&s3nJ``Ib-vwX76lW&zdnHS#JeMg_m%lJj(M6`gF>lJol%6F=W=G{P*Xpff7g(64` z4M}A_J%ndP#IRHuAG9qrk1#Ln!sIU7ED0?SI!3rkOF+C4E0uIA2duJ;l!5D!$mt`xIgh6?jIG_r30Hsd(WA)-Un<-eSvS z9Z&oZNO3sxX;@_cy!Y-CBRx;@Ie7`|7OwRNFLDhZUARpoD-w7N|1fGm?b<+%=d%?@L181Sgldy=ztkyIq!jNab;upO< zNZE0Q+G#C?c;VcdE@>|m4PBK=gJ=@`*7Bfqx5dJ4OzLi@l{0s-KYVJPe|~Z256h;T z;~!reED_YAGXN|XBa9w2z_yiLN>EBRnaNfbHe!RUF+Lor1(_S&B5f-bS@01wjh$2b zDjlWCHh_n6yFK>k~<V?b=v-S~%`QepVA&6~Cq;Rd|}BIod6vQS;;QUSdW9JHV3L`^Zo0o~5v8)7PoR7GNXDtR zuWc)VugSQXEz(Zft_`i57k2c|Y&N^Y4ijm^e&waxNn$%CHSU{^px)d2lE=u4+$k0A z&=pl^{2ojBG=n$V=oB$@-?fPp{t+bZlgLY+9MHd>jJXWd{8c$wyB5 z>HcF-YZ%DZy6Nci@nHx6L>FP3U<8?5pWO7aZ^mXfyQHq1zFvD#=zA~33+;*3i+?0~ zKkza=R(NdZk>I5hD09aIbPe$%IMbng1a{Av|+?$j%%} z(64ICw|%g9K#%yZ3luZbsKF9VzEpnan#?GB2u}d?kBoPpzW6Pp5$i+BMe)z_Ohk5} zclU-`QF!7i_h?k*&1-w!hbX3Wlzh~%yW;s5>G~2{4K@XOn~@+2izYeDbBN}NN2HTk zQNf%)fm-_Deli&^NMpB@?lj;8JzU_Z)9`YAcm>Tqpy9El_wO6t0_x*cv7$vebO4Ao=uF1Zd5(`SF5OONK2oCc z0h_W!!NMEC<+U*{%h+4UI@BwGKMww|W(QX;*Rx%-Ot~zDT&5|54Stp!I|(;-!>m~8 z)p9@f=nfs}AV1^7d?|~%tZWRyQI&KKgMl6CE~;0i=F~mT^u6m}Fd)AcUb$`GXb)cv zP!yxxDxq_oKI&sXS;S|nHg3fN8G>?1f{N%P2gl_ykxFu%eOO}w7$4O;&+hTHiJ*Q* zpKkWgR#<;n?3126PiWuB@=Q|@vSBr&01yEo4WHbiAhAOkXD{Vu+*sI^8DJbwJ@-!yV)~iEa9LI|UVD#@#O;Uog(pN;Yz?w9!?SW)k3s(U zd4olCxadMmLU$IKOUz+#u~@4(+(I^jx0}{+3c{3}TXACAu2~RPIyhahQp!>4p)p}F<&shmwaa>4*Cq{vc7?z3fchsC5eomhTR{kk`)S%w z{0?IGD0zKM%<8L&yyOpe5@{X47=xN!h~6!e5^O`E;x~Sc!__~4Ci!M=T@)LZ7m&k? zb78W{u4HIcR7rPgg2Qi~Ns`iKf;ORC(`4B&9{ednjY%h^Qv(HzpnQ?+K{NfVoL-$& zG4AP$PLCQ(5fR46r zZ6?>`hbf;McWBN55RTl{Wd`9FkjV_qxd`sMVafql_Z@=2w1RJ3Wl!?0rP+`fd#g6R zmzgaWNEdFPA>Rq})4x?t1W4Hg>s~MdY81S;(qzbLDGNvBadV6^xA-bgd7!c)2GmO) zaq+xl5@U~PR!12|rnd+)Mj+D^T!ZEBtAvT$q4-R;;PBTkq+vc%(JuW#%wN_!HAndK zvK_~GxG!EG#3DUm~?YiHI4YH__S&A^{f?&U8iqJsRmlZ^LhiB@4QCd za-J20v(b^&@b39uDHXc!5+Rlb#i?&;>ARbFOvjt!*f>!r4RFXjLN`}NdaV$OAC*#I zN4Gk=uL{6C^dceoS4tr7`DFTzL^r2hN$3mXaNxc)A0i-{N)vSAD>Oej;TS?kHo*1V z`}l26tP9(WeZ^rZ$w;cVPb2VEY!uMGDU%U;NoqVrB@DjAKmKsj7S8iCnBY$Q=J+wM zzHFw@nGkyZtkyVNy5ix4xSR3uV_vqFi6B{lAc8CNu(PDaM*=C_W`KW0aIox95yc-~ zy+!Sa8e(aT9hOIkYxfSJkQruB^Y@|0Tw-dl{(gJ?2a!s>v7V*oU4B@pjDYkhy9&7zSsFos` z{!W6JP$uN{^3t?|gjo`&v;(io_#QjuI6p^OB=XBF2ch=bn+dZdX%lhv20{sVx@sNk zCiP^47{UXUQsEKz9M8Fr!Zf7T$VcTjLo0hWnjVVze(hhEvvU1CfUCD@4~r}NPCi_6 zn^(9<^=9+e;Hmoes(R1cVXQAxFpb;(3-}X>S+sZ6YlXgd0$uIOBY}iRFDtPGu1kCk z=aK%?(HjGRCGxZB8l38PdCJZb=bUP?f4D_R6#V|@A^XC+pVoaAz&|RY0E;_0lj*|(py2|dsZW;Yk|wM z`Nv&+!IXNR?Z6Y_jz^y>0vusiix}#55qxa>AkgDbzVk8fkG1mzr7BaF9)#cKbO zL3j<>ytnCl^Jydd^+ziG%rH446MFIcGf4|37ZtI;e*U1Noidg$^uazd^XLEFt*{i7 zn{BLLI!t0^T0*=pZGVZu^`%}Y;+CFoztLUaWTUBokh4GKN^I6igmBbXCxYkkEBHAI z;{bdUujSg`ymyJ(CS* zJPt=^lan)6jz5?vt9x}yo=%j;a>;Vf`NN;EkcC3Eiw!r<($t&|RI29FvpRnRL+k0s zHATY1t#c1H>?-Z~XqF6-|1R%0`?@sc+%XXq%GLcNO)W!hfMCTo6e{QruK`tTdk_1p zn|wm}P{SSDXCQ6%9g70Ir`c*-#q$a@bJ<1{d|mAB!TPMVo;%uUucji1DF7Dhg10!6R%J?`N`_16YzVMBBtNc?Im;@dR(6uwv@{gp+uI~F?ekt)X zm;vY}Q}lt>w`%H?~OnZ4Q^AN?6%Vi8I%5N+tHLsWOls7ogCBz#K*zb$Uc+}&spQ3rG~)- zEJUKCYFEM&UTKDITEcWMpb#sofUNCk5oWq_eR9vbS!+s3gmWR2=fW;j|80Ahjp9Z7 z`{PihsjlzElRAKE#}jPxQ1M-H^y-?ea?(=l$hx&YnTKCCFlgQnwY^~~-+L!2@X$PF z98O?IKcOPBn1&WvO0Cz&t-!w`dofp58LO-l;CfHKQc++BY*~=V7fE++&clH5Ad8t# zE!#wYj794}{0WT?vyQ4U~ zR~Bx=ALkYW77qe9vyF5;#2~bc3i#MBFhfB{z#;LG`cYGqkVryxyG$S=Geln%QoUzh4RMJMM^wJ+4^-reX9|tO zE5{&d)Pt%jTLUwFZTIh!;Fz4%eo=?5Y$G zO(Oi)aSt2lgo<^pO>PS;VL4WdyloIlJARjTN44zlNwFovSV%tsDD0Q}y%ZdarsJQv 
zU(=4)0gh<2Gxz;={m<85T(34ULDx`MK`h2pe0wRYPuX}VaYWhnc7o=u~oVC*wV4UQ7apd29UvqSx0#r#U}T?HW`q|5^X#8Z;=2w!CKsz zZ!i&KvNl`v&cxJqhd)Xc%D1^=`DNzn;-_+N`~WVvXdG_bYNc+Z^4dC<&t~&v$U7Vh z+?&k9*?)PpICX7bMCELrwu$yp^MP=yfDBZ7*t6+__aojHr}!TjfH{TuePve{a4Gm zS6E;KeRd2M-fW;7I{nNLy&uUPco7PHUmHlV5P%A44qPjhv53UZihj7P$qj0r0CaHLWx@t@7uKEi z$85v!ZV2W$I6(HZ4Sz~Uq)gxO8qcvemk+)D<%V<@=;m1Zeq?>kkjC zKME2ge9W`AN79KV=B&t(NkE&-yM4zcCxMxNVQO0U+)cOqK6TKGmHy$1EO89KL`5f) zbBKX+rw6w4nXF#OF-*Nt$;UG+YV5FPUyc7}cc&GQez9bD{usZhnGYg{~kWo+6J+&E!IxpP13nlsdLptkE z5=Ljet?ku$Hh7f!#drO_qCd*dLzLh^NWaiw?#xbgKfTZV%8zPut{OGh=NPXk8@3@=49)I>V1sV%LB;90Ln7pb z1b?{38}z2LmbISJnn6btVR)U>hCQwc%HI&IE5Im*r2H!+dS}N zrrx5e}nyAfo*Hh%fj2}fUP)BK|a#F!e306C6(9`}tm zM8me|&23LY23AR^ES?E6Q|5{i$KOGCrVu*bb!4it+Elxi=PZ=Gt+S)HjL09TqsORd8qqSbjHdC+7?`--Pqs z?jH`#TC=i$?(HG@nMkbDyK(^HD5=LTNB9!sn@%YvW8b^ZV?(n5ykNp>;iZ$Eu`IN7 z%|Xq-F7#NY@$pgMVUcCqLHsVi(-LmwpRY{7y42$SIzXQn+{8trYB^7NzR{oZ^qah{ zFVFG`(~TVa$JGJ}%J245>Fn+7vxTzfsH`6q33D*p1Q;+GcU-SnCCGLNUHMt*HCxS$ zGsVfH^Z;@}%?#OR&eF4R(DNBMPwmuS3LSw=%zA?dnYxJ+m5dOS}~^=rioiEu;8u5h~C-knF+4(xsJ zV3hi$c={k!m})~~nKQbqnwMuH|1>&XYS45k_;)2-xzM9^(Ti|5G|OX40D}b{s4zM| z+$ijZnv%@P~QEP2hU1&!>D1V*?Wuk1`x|DSt z4s})-i+pa*+yCVO3{8Ax96f;+BBq3@a;_FGiTe*e2s^KX!RX3NDW+AJLo}vrF2$Y_ zn!ho*K|Z`BDw`*i<20D6ph9sR6$o0P$j=a-oK(dfi#r#g0~~ z5#Qk|wHQC=JUYrv3PzO9%D6ras!=z)nurE(Ah87!!C) zG=m6_Ou>J@1gkUVHhEU(_2P-GXeQ9Cn{E{J#r#>?tt?Na%7^Z`Woy|WdplK(+ z)n!}il;5%-dDIwz08sDK^22d9?412;f5U-7jn}{nOT!%aG75OLV4GQ>uDcav;3hy_i2eE$Hmm==pT)gu8GpyRm=(?ue{oeMiKC5WZ?32l)!loSR z6L4^2At3Ocq6oz&{4uTkw!rz5a2E{XyKxR+|c+9wsGgMFq5Vhc_fRSuy7O1Kh~ z+0R&CdX6toPeG6jq~V|KCR7W-i#7m1Wg1i*e}}kQ4(YR#DD~PtmgWL&y5@<=xV3R$ zh{t2Xnl2}~eSa{KRaiK>b(^sY6#9VW^B_&NI5jP%==LC!*BsjfW#!HKhQYQ}tkRC> z^~9UV0wOQlr8L16r+!yOn2u9LE1NI%3MT~|C0d`U6O~;^A+@;s0csfmUvBaXzLbXD z^PX#a`E#=(GA3U+DHXSGys(O=b93P2#{heL%16C-gR z#VU7WX)TIIEUu(kP=S@6t$%TNTC{T=1z4t218Waia80Vh>9$>yGAVj zd-c)a2@|=cU zbTPa(^-9H`e6>7tY0_xa3PnaIG5Sj+cWO@Z*NhkwARH27@3(L3fhk-Y|qh7uKb8~EKiSk_Bfs*JoW7ig@gO)SdGH6HyfJ`A-0rwDnuVXkj+xyR+jihD^~saK+Bz#;{-AB?luP=kwmW6!iZ z`SbRp4Fs;Mkokt5n+`;MOU={`cKLTIUAF)5gquuX=p5+FW?RaM?t9Cyo##0lO|5z0 z_?3)S8%DRzmU+h{+H;+{iz1+HVq1YU)2)>vG{!jd@zt@s;hM`0fSRjS3H$l2r9izt zkorfKtBR|i@5Zg7jI&TBCJo1u=IoRxdfR?87}{~(Dh{)R&SzX{k7WpeLb$U@S=2VK zB(CiW09;>{5a$$PV@ugn&8g||cL?JpF#*HCOuxzRVxa?qdDvnDh0|rDA?ayLM3}bmh!7kjWL=K9czehI*gxq|EQI z)8t7JN|Yn&lv%`YNy|SJOML9E0FjryvZ9eJYFeG-)Kvq{rEF;^ok&u8WF%uCHCU9t zehz3xru-Bm&0Q~Z?)vaso$l&jxmu|3aR)oD&PV-sLd;OQNO~TD)p>4%6z5YWoWbMg z^)#<-DmO|m{>Q4z|LC;vG%oPL-)F{uTl)r;ofCfmh9cF-+r2cP-@})3->x z@zVh};D5WG!NN$xA1hTeF1X2=c9K3z-}Zd9wI}uVXiN?@H*v71iK&;jN)t+sFfOnQT z-df#1wAGshTAqI@PG;lH6@_9#Z3Qk9wa&B@fy_|k>~i=4qvJ*rL2ql6MFS54>HW=Y z7$Q@1xBAOrYU;6C4?8Ac$jZBwC{sANBv7+jMGj``rTzqYKwr#PJ_gryi?Q#>_L_l& zegNUW^n{a9zXpEnZCQb@%M6xdUjGNs5`69~6$-#j06Zd9>VXy+jM1x=bl5{oPt{Q5 zz@k0=zK{wx<9m}0d|(^|S++@!w(z+R7d84oB_6`a>~Qc4<3Jn76fH{N?rH_7hRN>h zuf)PzF2>Xl?H&()ke&@gUF@Ssrm0}~rKH#*S}rtA&p#loW{1-fpFx`VqaKJ-8c$$a zDq$$WD5Cg3ocbicW4z)v3T$FhR$|6%0U*fr~LlSH@SZ4EPs1$daF(=9o0nss3-uNd!gdXFUew|M zqwQ(@fL^UXTrDpTpK?J*%i$l{WFnhaAHX5QR*Ygy=q%g_+qgJK0H8 zX|eeI)UO3$msbWefi?(qvV7^XJ11^6&DS|T+xlCBFbHZ(ews6~7AnsDACxIaLX<-) z!OL!yvr$BmOW!;GX~Ypq_9?xPv23EW5c_Dzwbu;^TMX?m9guxAx?n`c@AZ?)EP~&) z%3Z+=WdTB&O2$HhT5;&xTZGBHwfsFg$4u-ZYenw~PK-qWBO(- z1qBVfGX0+Vpf}O1W=t!529Rz{LYyn49$k;iT|Vem$PUjLw}ECbVe}&GZ#x;r7h^YH z3SU@<2z!X^8M%D7#Pw)8GImK$==+Wry?n;iP-edsw+3{yC|L*oIQO_f(TU=!ev;i| z4PdV)oD4G+`@Z{7FWV>lH4O_~dTye6C~=b?TQX@EiJ}E9b$~s*WpPmnN#oeveT0Od zi{HioyQAtIuK{Bdr<*(eM0~ANcZSYsQ!x5w-uc(ls5P?`1hFI|U!_<%?v?;dy8!a3 z52YNfugsM_^V{%-cJT)@$k+CxF`60A60w}sm?s0 
z>WiX~@P!Jw)BSWG(c=@&$Y#4N@k_(o1=4)Pe|`xreik}8Jh?Jp0|xI9ru4Es(1VxxKvzFaxF2^pD&t<2rS1M3%`(thg82U<)mg# zWs})TgO&@HfN(|_Qnj56p0!r^D{!u_hR9ckE|1r(5E^Hzsq{cs5amuHf37@Kdz>X; zY^Rw(Vy6fSDPRUWZ^`QKo%#n&;iJs-aRa1nLu0#~bRZ@q+~nS>qjq%FPI%uL$o2M8 zR?qFe#X;*(i#E6^F+G_y_&>;yd^m{fS)*Ltyf2hvc`k8#Nhm~L_%r-k3o_sS{;&Ss zN+}E{*=QxO1yFLL@~GyRln8aPT{)*&!t#$kT-Fx&5chM0t6{dAY3EumUg512M)-Z} zh4hJV4FU1XsyT^wV@*$Hzcw*-@6K5xa|MZI495m%D_c)a&}oWP!)5a;8Uq7#t{j4| z+;OkG<*~M^NBzUU-c)`xKrKx-k}op|py-YrA~ZfC^c zb)WD2gA-U3$UAg-YM{TqXCx?VNKKUN<77e{`_B1;mK7eq@ZfCZ03p73@pahNK+4=o zI1NNGizTZ3*MH3q>W{^~4XA!7iQ=O|fFMu)-l+04*a){O9B}Rug)kQW>S4|X`ABYt z($56tHilIt>(KWUnT0`IkWfA_sc8(jmKd=24)FrK&QbBa(#Gsf>#vw=_P+zD ze2>jCGKV+XcQ(jeayRe7CoqK2|$TZT@lwQ za5V)~9I^9-TJ<0F(^~tN4DMBe>Wzm=MrqA{>A5%jeE0)mfU|L(kbF|7Wt zLWL>}nU$*0@uZDg3_~3{!GAL6}oR@G@7xa}nu7 z8`Y-LgUSx3`d_-0^bc+#uUkS&V*Xo?@{N7>8^-i^A2r@O*%V$3155M!wSzgoH|qVS z@22tAn|TiS_wVa(ra^ZX18)bKkJpk_XM!cN2aU;0e~ya{|6K_Wh9e5U9 z#iP7@T)BMORv&cT)jv)lE@9A6L30mtfe4yD8>~F<#GG&r=G1*r39CUj-P~d)!{l`W zdcGfN)4Jy`+iu+o&hJf~wl3cAWP$v;V;aD4fl4@H zY(6k9iQfxR)%+a&Xl4Ap758^^j+7m#nVGH@^tFTW=1&e;OImHDpWvx_p_BJGA7zJ2 z=;tXn_kP#j7N5Jwh}d~gQ7KOqQB;?fqqG6CUBl)Fai31S-1s>1q22-lH(wib*JW`& zJE!>n9G!PO)$jkt-)C^_O$Wz3WL8GW$vVU_5(mdBl|2d>MQ9v*ba2iwLdKEo2+2sp z2*;LGWF#|WL?x-dzx(t1*P}<{^gi$VbziURdS0w^*8W5mch0z=u4M!DN>#F7od)1% zN|m33=D| zmjjI!ySP%=TWT)0Pt&#f%H~hunazY0`3X)U_N&>rY7xa4XIY~Vl2i{_l%-?W+gXcR zQ}FNy{L!OJ27R&h$*=_VJSfI{=4M?5v)%5~`dOc$rO*E)lVD%eOd$JdbY^Nep`0ty z^S6DnORr}9Vr)SfncqrYKAEjmF4i(RlNpHML0Nr=omj+2YwX64T)eFJC_BkhVUW80 zB8UYtPF1O@=nU}4m_8uU{g2kRIe8*h!4+~sKv{kNe2ky0ldV#w0K0eu(4zWeYo0z) z!%a0x6<1t6(qx`(#Gf!{$Y8@h+c6i=vcv3-)gDg?>PWt@g4ww-1QEl$11WZIMJtn} zl~m0a;AGJbKXlVi`*sQT8m0%I6SeCqN?X4Kd?R7I9!T}M(5aNlYc}>Db+eB}M+3|V zr>Klw@bv3~)>7AMm7{8*v3ZcI{CRAC48c^0(fDQdJlqkDN`Xj@*EuG*QezI~b7VV_ z6-^0XAma3I9@gN54H=(j8`uq;8ph3&rR^P&sZ)vgQ*X2Kx$B!|UW8wliih;Zvvu}@ z)bnY1X5#{-Xo(W;RlItWR` zSba<+Xi(zc8TYy38d_L(AL|uV69e8FA#8h+EgQY3&+N3SvLkQ^G=pssaYy$cJyMIN zWhj43Gb0beKUC*Vedw`nCD({Qs-X?=I412mb=*&*BF71~3qi=;9-UFd^Q`H|v9WtC ztJXU~asPwbo4v_0>h*@{SufPbvbFmea&y5n@R(~Pq$A0b=#lq%2vs^)Q4W6-8KW^6 zqU|*@x}39SNwIIDfKXC@QG_{n^lG~K-(fc66Z?UcU zOHzG3&mHcFU600dL4v*7f0ZWZwB4=rf$Mxq-?bz*tz|wznWTtGzx{j^o*&z;d}?=` zBM1e>lw9@q%!7#KSBKI^o*u8bl$CYfR{~tSAkrPSzKvO0g18@|JoG&oy6w&;_cx;S zZn!jT1ephshUda4k_Ixi#s?h^DlON*o$w4xZqvf}Eyd(*g3-qCH1G`OmJGIfDIjpG z_PI{9R_LQT)fe_{Zrf2Nf#14|l}=n(*Kcp{ds&J_Y%IKky#jje_oVs)>9^4;;^)YQXHk`QYR zfhTOvmB;)z#2i&E{+4Licep?^kT}N!u4d6@gsDSB+{lW(xWC||{J&t>`O3$3Z_u!@ zS=aV;j9`r6%H#KBsaqnBrtYd?gm5G{i9i2gLNkKdKK(u8b`<>0N6i>pi1n`5$`FMK!(=U+B9+o#;bd^4-l ztaF{PU)%$}!+@sm=c}8QLrRH1bl=WNwHfQ?gV4(TJ2NMV!7T#nXX#p3++-Hh%HNYk zgk|*iSi&J|i&SUbw{q17IfNikWojcs*S-+VE$!AkI_5tm1{aSC`wA5P1^1th{1tu9 zhj8V6<;#jQ^%*@nKzHJPl-=LmPaTLqTujJ5L`(@fkIWb&i>qLy<$TtMjNK(q2@+9qc>J$iY-V5qDaY@+4&}%WQ)f0c=(5Z!PH} zJV7Ta`0RDZxMH+;f(b;R7K||s(MF`SnR;WB>`$Q~XMAE)AY<|q@+H~R6W_}TEi$0$ z8Z?80{;b#ay_mQla05~{&T^$WHJt&jAIVLA_YQ>o{i59sn=<(E!H|||@ox;)5#^Wj zudXNH_kI+U+32p>*cwRCr1?y$>CiKyPnPfQn*^74dSjv!6uvW9+~rXc=EKVDM1$Er z<S+_-!S(-S z;Jbyy;EoN$jQ$ukRwFfAb6`cA8)tuU-Z80)95k_T;@nTumR8c4f}Dk`l?+aK@8(30 zf4fV4`Z*OGa)Ymq4O*Rr2xI7!Q&hEH{U66^s@2OTjcRMC8pt6{m%rIf?uY2#%ip6( z0cb>}1fVzGh|At;UZPorTo$<%O#tz)EHL~!++n{gYIZOLNA`S=?bbgWpUmxSrXwO% z#6X#iZjKB~Xv`-2`dPQ-sXA58N>mys&(LPD?Y+9!Y6Rv+0j;~s)VS%}=OC|6p_?tU z5EEMp^O<@4xmk>odi*^>) z5Pnqvoe`S#G?XB_eHeR)e3YQqh3>9w-R5trGG0CIS-cuQY`|~FlsO~rl3M%k#T_vy zHMVR{h}AOPaxBhUSt7`62y&@!G7a-lithVab?W7Vz{5LOB>s8!h#j8aM26(lq5EBW z{C|+Ipz#vVvWv>G+rBW5Ecyx;$C8y;@%3G~5&umTMhv2CIvPkLuP=W8ZTH!9CWk40 
z{@0x>=v;zv_*I&NVL|_$q8BGVMd_RUXWI=WSG19*(w^@URqy8)&td*B?wo^?>O_@x zmdKx`SH^O8Iv-8$n{%!K&?+0bUckZ%&GX>Fu3^SNny#r>K;LFCUbqmiMQ_9JE#}>% zrytW^WkN(2YUg<`k9St$f0Q4pdJ7Uh-H;eaU&3=wsqh7{-r|)l%T&}BWx4eQuD4{k z)uW$TE`7D<#u{kmb4}!mjLfrn`c^!|P&al65+|p#7#^7}yz~2=SOqe^1HI@|1Ibu{ zRF6THEI(C?t9Tf&B+ez5T&JIuXd4ei2ax0r(lTMlG^2vWIZ50QUUVGJYX0vVoGfpn zq9uyAJrz4TZmTkI^o;9&oY?MGuHwX3Zk9(9`iSgnrob<*9VAMYc^0DF%4Nqj0XU1e zkm>{*+OTQqDZ>P*lh%R0u=@>cpW6`mMg8%gHPfDhR1$__hLwhEkF4AFW@4|-Yz#q4 z>BvwDkNIzV->!sVcJVf_DNen&!~@NZX}EEw-9RJKzC-VbW^+=Ns}^X%uRvSiubbW+ zxme)azAH|VdVCz(hS^Rml#gZdk#=LWdu|9p5Bd8mho>Xo66RoDOjTY6AYJDCluCf$ zIZjPm&kQ#vZtGP*9H@Q`!_^V1?+eku)6&9)n;R}p?$~) ziy;>0Gy)qYD(&5oHaLes5Na0txD-;QynRly<1L;p@1&td1o4sjVW`H<6X-}e^P(x_`+VbJMpl0R61UfzF0yv6oS4y zvm1*|Us+BfE!qrZj2QUcqt20wehjfzc(#Ur9&PFW=98_c-3DuW3`akAXIXrK6xsv^K z)OC^NqtjV>Fa+?@(#+gBgw2mA@}S?{ujn*{#KSR#QE*@QY2(W!(IT+L}r0VAI}aX%_I(nTIb%2;9&RvyA=r$?#yh4 z(x1ovSsndc@baSi2r!A>_|s(DCB3q5_jxc(Rft#PbR#85c&b^==LA!MZ9R(GGCM5j{NL?P1r9<|MKS*EvyN4U^?2D#ZlUggjXka3?M}{?ct1 ze+(`PHx}Gfdc+myFzJ_{)4jzFn`2w}e4+e9G@)z!RFoSjV4fXwe`NZg6&SN&JfrTn zIxLe%tV-7L{*`B6X%o$C*71jy0tUD)N2DDq(o-&`>eY!G8Xjc_F*VuaBKl59l_vbT z>mLisj<+k&0S8#>sH=^d$&p4xS|hFQ&}G1|+l^cV;8BeKXwNr`AiKhX)Ke$I(o8)1 z|Biu)PT<0#LWOp3%W_`-VVa=9ItmRWM$Zf@34T7l8%I{p$eF0V;e;Iy&G>R^6b2Vm zwKPKyfBpWmd^kFwzylp|=@>Alw+ssllesBkHA|W^tU;whe@ZYwUA;oK{6^Mx+Ra!q ze{jOS@kR%8rqJ~9Iq2>c7KK1HWz*!&2MNPC1ppw9o1_{;_7`urNGN|av&sz-@_cYl z2zE|-2y-Og{HeK~eq$T*o>g>fk0DS%yz-9cmSnodW1`I}7UkSBkA|k(;00JRd+P+y z=|!un_Vez|ZvrVArkQdWS20L8ChfCL!h01!h8od0LDB5%otXyuqX`|`*Fso8ywBed zBnW%E6smDUR-(m?@>9w+v>#jl zWqRkbTXYD2n1IIMYry0l>l>HltQMb;DW{4eMmc7JH)X8%8#q( zpccnrTQ?52HFf)?8~;t5(Y98smrH5BTOQ6mZ}QcD0NTWhXT4E!oHD3d zC4LdhslFoWQb^>&f90v%(uXIgf@1lSzkZbWqcR%R?f%GNh)C|Ed`gQy*7sBB4zK$w zrVRVAO=bWoLx71XK*8;Z%{Kz|bOW^@(%dLA?EP0`T0D|v<%FBtkr3=ZR`keCcd}BJ zBAeXfp%1Oy`Z$;)f0LEY!5)LNZn)}Xp))C8s?;t87Sz$nrH$+8VOtS`aSad2B*^Lx zav}cRs^Zpnespy{BKi7}r+-l|pbM?B%v~}yYc_Nv!~Wj~E*EuWEn4d%5m)S#BWA%ZZ-+O-n zW8l;&izlaLX{txYWJnX{#v7m|PG4n3?x>3EV{&72)1#Ck|V`c*VeQ_@Bl2%dLc|gP9w)cSu_YHe@d3 zcG#D6v-{*xE3jMixqIzl0tXvQGREHNp;xZ4X<+0@BH&qS*h(bRG)#l@-xf^ECQQbz zzDWcZ9pzzm`b-hay`_=5vZ*q!ndRVo&_NHAGczWeLA@TtGhz{Kr+xnB|6st}CkUj? zz#N0&@`LCSh)M|3Z-gPe;_jz>dbY186p0p?d@?GCaqO5q^*HL>$&Xo?p1{7 zm&CB!F=Um}{+h;yn}f_L$8Qz(O_i!n%^>{AZ#GqQ)3mUCPiRl_rk=R+%n)SF(vdC5 zRfSx;6H42wdhW~Tef0&1dVYtx+5g}=ek_XFw329|fshpVH^I5+cm}0lLg9t+dC#es z#m&_XydbmU(BfRP%lKg}+xEpsU{jU%^i(M{sL_P3x;=MIE6~LAM(Yy@0c+Dg7D<*j zo23hSMF9_D-tZ}5#0D^~hDrwHCn`r~wg3iztAYV%L;l#XV$a@>7VVV=pM%B2FQ;)f zdk+)_iFR>VsBa6A_6(e??jCEauyLZh40^uFZ8NYqDz+Fx#? 
zDs~0ANJ-EU_`{Zk=hLS>a2$4RTirUvQ6oz^y0j>@f@fp2seSBF?;F+@dhf~>WR1Db zVEsBV73<0KFZ28Ubb{e4+)NSp*wA}sB??%%#qpmTt(-`MW0v6asU2jp_w|amRFJq% zy5rV@K>1IK2NU%HReIP-Yc9lm3v|ge8Bwl}*xo1(VGS-+2hx-Ag|5)18=Db7=kx5- ztauT=;47Eubsvqw9Eh>GpA?a?0x<$a+YfA7Z;iB(V<^O?O_pGNyEubL4BcM5qI z@Gx!B-l6nTRRfI%YVarBQOGVeH4O$fCuT(|u5%UtFHK@0BgGLb9__NMxfQv>cCe@4 z3}hQtACfbElI03hs0C={RLX>$>-CNF(zBvIux7*)I9b4zxqw-`wgDVlrL@xl7i-eb zLFR@yTY`Et4ak=?%F@>_yGKDeyr<;%}T2-u0O4h8ktpP;HA@}_Eb02b{d8?+uM6pY zuXM3ZjKN(%pixPf5Eg>PHh(L}7IX)ZM9>S^QpjTQxHGhcl8RSEPV;8^yE&?U{($QhIyEInMN5?K(^r;)7KF zoc7Zm(fbmMP_-V^C(cyBUk9eQNeCW)pCMz`bLiI-kmO$CLeg234ek~%S3fk|GZ(*# zdiFzd99j&YMNmI?px3Q4{S)LxKQ}1C23zvA#QAGAtY82GeCe2mu*;@f(JP>Es?)TI z&d*$bls`(4A6T+3FNK`&HewEIR)#-+R!yBT-Agu#B(iLzd^QTE*qhH|S(pQu+oBAX zxcC+{1hu=@uuZ!2ugEJXi@(&wi=5N_F-!_3c~Tw(4?A6Fuqh-R#cM0%N7)ylv&U2)Db1l-CTQC%p5xvLp{Dkp2ohk^F(E_`9)Ih&%v;zxmjPaE zfw@+De=JRsT;siSuo@6_3@!H*?gnh6iY2H2cGOy6_)Abg1Nchb}NF~ z6y8kXj~1Zila$IuX-jbYRT&_5=FA|Dm!&fcd+;(7CPzy@UaiAok2&^htk$HTmNd#0 z021)xA`90hIXfJstbSbJ{QFpd9;T*D3L=fN{O~r(tXurUc;`1MeBO895K--xB3LEd zVu*+yjc)x>u}m^|BQumgw}4j_qfEnkvyM5jXj__l<3c9%Mq=VWPFJ;N91Q7NJDvag z9jEV~-muPS%7FmrLw)pUmt`j261J)QqP_oh`I05oG;|9uJck`b^1)voQqZ-VixAt@ zBQ@i|M5@2ymIGcM?QcydEu1uG&JqRGrb{-P!OjL$f8lu17L zj9+s<4A6BQmqFcc(bDYE;UdB<&2v~?E36fSRE-EXNE;HGc!8-5fzS1A3PQJX1Lt@3 zzuqnA=LOga%?2D7t2)`Qc4=xWATUl6yukRqJT-RkXjul=H9Ojk;TD0p&4m~JjFjc=5)8&=DsGtgi0#Mu2Y6MeA z2Qj9ms@RYSkH5}Cju8*SNi?*k%@()u=F`sEW5+{|<&I!LewiDp zijSc+BK}l9((1}*g&F)M!Nkt*yGt`N6wamCCs{DdgShjT$&%fMHt(%%ab;e8{Btoy z*M4IE_`)}+(jg&c>+5y|ww2Luc~E(1;OE?JHG^~Ui^(O)F%{DMpxt2F>!Nye%%QxL znfHf$?_&o?^zg@F=X|xyTk4G%D+qcN$x*F-;tTGFLM51mR`c)p#@}{twV#Vhd+IQ+ zDZP1MC2JPlTA86rB_1v{B^V!N4V0+ zIVdk8Ve~wO3uZ%YFclA!mf;GDEX;(7>eSPoE@XTk!tDyoZa_&(wmJ(Lcpj%%Ic3e< zllhW=5DtM$cA=arf6uMxCze72v42Yia1-_2Er{Vzl1LnH3WeylLz4tPn*R;Ja(tTWA~!_;UKf&|9;$3-}^5uzM+oMUjF~L2zlw+oEJvdBj zzPSsoDbJ$Q_~cX_&bA$TD=b`pz7b$$I!$xRTfo8IOZ_^6d-Of7W%A zum$P8k3l6FO*7|>mAbkpwoHd)h_?9FI+YPySl|mcbx1oLG(}0#xK-(D`=lge7a~~8 zfqu%R>Set_JzmRytW{6Tgf#L`)-3>s)3uTj(_s;!sM8&G{ipo9tii+j#7Ge8_$K0r zRNRG$n`8JL?6W1tDv(q|5UM$=J+a*eIbI!9uCqZq^T9P+VxCtpIEOIu9@nIm6p9S( z7pLj!HY4RO98eiL*O2;iG1@wbDd$n}t)3|aQ%}DZ$7dnb$6OuK-BbfnY5p|RixCVC zdSY{YChVY3qtx`Nab+vd=ELit@(`b5=Fj({*YfQ|qBjRewoAgrA~21*#+=Byw?|tk z{_E#Kap|$zcoJ`LG_)uZdL-avg*P${ynHPy_vCRe2q;IjSpATeYtk_=RN>{py68js zFdr}G2ym$x?$)vm5JE?Se0_AYR*+oL0B5!0H{kjBW=e9-e%w*D-ZW4x1BXIiYUzfT z3AU}EBwNAIMp+5&2`zltxa4y9oMmAP7W`zN@)c`D;C(7!wsw1MZVza}pU)US&}|B_ zQnyTxQnh^j`I6tlw;||kV}c&2Q4So!@$siV60hu=Crf326y7G~Bhm^Y^ZZ8+Y5-AF zoe8=wrrMN?l-ft0^5Z@O-7ybxXP#T?IaxbXB9V&o<{2za*TaFbI{{k_HKN*z!tu)E zDdjN(IC*)9+qJ)5`s^a-flJPsXFU39Z}jJaO^npHxb9I-k^W|e6QIFcWl z;x_vxyT4!>1~xgSIy|p?tXJ194R@lc?3R@g890Ua z37(fO%)ao25_*=4y z=U{SH1O{o3ljWMPh+&I@}t9X7`m_SF}<@Rn=wKr-`J2QS4n@+X5OKdWQqQ(9(E$AGL`%(h{V zB5YHuDoc7X$n0m;S9DMj?l*|(6>|wcp_{GfRSqQ;OC6JEq8Hak4@cL@WKq zod@cY({mu)^ICa_*FDnnUFtulq@o-utewFgw!G9|dvaW@w;mw0)mnNjpdeew%fySgbu0)pol)v#^ z3rB;CY@F7SO}^1=*x(WW|M*I5SpA4RZ``My-PN&1udDKuK~@O76-2)csGLs5&lFue zSH{lK&@=qN(krtt;$I`|a_6TTw-%e;?5q?#^@yD5mPt>HPn!ct4ds%=;L+MyLm`*V zGfyQT!%x6~O{92xxm#qa5D#gvJx~sb`2CZ?4)HVHjr-Fg$4_!6NBdKwB7fgehVtGy z{|^cq%JQ#6P;9^2Kk*`(6M)po=6p^=Xk#lzeMn{+mTZRn0kF!hsi&D9qR2qqZ@qxLwf)mPC`QGDWY4Uy1atNUj0gDgvJ3UkJEhR55;_Ot^IZF0E)9wb zv4x7ThQVSEO|BofiUr!u(FvC3ph@6JDyi0Nt3&2*uA8;jb5pW6K<3r*F^%8Wg&v}} zO6Su5O7242ObNk`3u-wt5sbNI`sV~j;Zk^sX3;r)eWa77NE=Tz=R=!^I3rVX$K$|640g@z2w7ND#h4`ie8{Avc#rkBV;N zFZ?ypI8huDJbShp!uFyL{uUY>+$LN^H7L#9}_39WbSJvw=wN0z3T;nj!GDD7?t zN4wWQ(W+3dJztAo$9ADFFa%7O@ErSB>}ccvNX$uzB9v;7c{iln-5~FZfTC<$`5y*q 
zK!LaR8AzoD5&xagw97B*P#j&OAPW|b=x+=I+zbRMjL3Rp@r!+37igyGe|^zK(__WG zAgP+U?(&* za8*=hRLjIvY2w?fkg`T(?_&u^9?VndO#K56nrQyS{d)LhWFdat=KSdvY{8hs?pHhp1ZT$YJpqLZe2 z-KNiTAF?#4#{x>8&a!lXYh@u+_Tc6iYR>bY5VU;ueOfODVp}g9r*bn;*7q^A(L5;k zJys;b(S^sCUSr`CbH&8hQ89K|VjSqvX!^SB+zxx? z;H9HghqfY8y7GNM>zdXFSb5`Yx#AGrw4(AD$=pk>Z?8oPyy4E21y_yWq@JC*ZkW-b zAdFdkDoPCz6zzs=;Xbw=M!yVSQ8450Rogt1FJx^yDe0yefHC?=*K>BE=ejh}(4yN} zIQ9G5bXAcgO2&vp1zbtO$r$DJvDozHGWB$S;{K}_8+f)i(v+OVKHR#QVXCty7f}mr z9^hVtk5$kYR@29kX~b}xy)n^B5su{)*1*Y|8kN`ZweFHMTDgb+wcV1ka?xFJZmM>i6qA>@|Apzgtbx>Y&K1L+_Ypjx8V-q)Y>cch4TW$0r zSNKL=`OgTEPkkKlHoE6bXWBCP@H;sYy#|j_*H<3z{2kl{Z{DS{8Kw2c3dizWrOT;9}FCtopz)>o?sNI&$efSwc~kwExTL zH_vi;bI5LItx&fAB0!Q>?FM?^Ov8NtzF`$%tT2p-tm{$)M&v<#tFM^>{dXqyl!_&_ zJ9{Q@lE0S8!eT~qYFySh-D{OOLMA)I*0y@Su-!vpJ|xsK8uCaT#%?gZg_Be#RCJ&vRXe0Q;ju(qLA!D;ycw-d2R4NY%2{Zdv@od9JY9@UO zlsl(Vo8mwx^6eE+Q1vRlLM*7S;%IYz(lK%l z8_IhDe-?QhnjY8BykPMn*o zK4wG9i}m?!lM0)T{0$l|l0MrmAM`txZLZjb09yo!A1_6p(aS0ave?+= z{wl&l`WeEg!8`ZD8I>icRSixWqcMQvl%W=-cBpJ#xmrz?=;ngS2M^70h{|aC8krJ2 zlFu(}B`yd2&0WbI+S8w?4^Dm_K)huoi}D@SqV)t__FWvbsYK|2Md6G3s2e}k%(*DN zg=#vsa{MjLG(!9I$yQdf)&q`eF{Zw)8REm(liYW;rA>D-V|OwpMp;8uD~5!UT6utk zUhU6&Z}>C;1vG!Q2^zwpqoEZmVg6NFvE&dUJgfiD&96 z@bc6yUq>S98<+^Dp4*ACBCYI8TyJmHIOQR8 zGJK7PO?P+H4kSDlwxTn@xTn`N9-WBA^-6vpNUh=|<|_0@l%!=?z@7A8?y@0_!~?_Thy2p~C1w(R}kT?)q;?9PPka{WfA zR4x1t-(2*kY5PTLPFIsrMvSTdDB@u&kG>6!+*(n4`Z|}^cl3YM@&*5IFV(D%{1>H@ zBbd(#<^wdHQUEm{Q^;~UNPL6e)#oyBmlQyFgsAABx};avR%%q8>R(`+%o6qK6m2C?meIJ6(`3LB{d~nwtl}p8eg|z{) zed22*Z)EC2H?m{aziTkzuM7;}In0R2|ROrB2}4`z}c1v-Cna#e_gTRWnkj|-2OHIbg`E9 z72O|ZegdW0>|ZV3ZWRNWt`d`(a)Lt?7$i>{{k|Rs3pX1_UoPGmO>qNcSepS$>cCX>q zSsMJgD?L02_cs_1k1Gg^9z4n#`r&=9H)5LF6{zw2uQqT4cHLteJYRNMlB@P4Ujd|_ z33Kx}Jni)=>_(rS--DC8q`=O-g%^Q@e6LGaR$`mb?d~rG&#`20;*|7S5h~J8N?e&M``G2EUr+z$*K?3bwXge^LI0yMLiIk*MV@H=F^2F z;?^!JK8TQKXxKD5XzWm%2A`$H zec}pbPrHomjKsQF6Aqkq0$0P+uNbpQ;7{mPCisfz$6K>SlEKu<{kFOi>nD9M9=_d} zy@l~vQ7txY3i5?d8JEt-t9dwd6ww@VbB|P^9}y&o+x_CJ4?BsiqBw6mY#P$i^Qo6-G&83(NE!CsPww7FchGN5RwF8fDlXUgV_XIQ- z^fl7{Znm0J1r*rM`~&=j$BgMN7g8suzr#QZk;yDG58#WcpJ;p5v zb17D;g<*Cy3&Ha8$eQ=94ntzhi626FMr`(7t}VA}XJ3O3mg#C!xfs?(u6%pr*iW}+ zf;~Qo{s>iLq8@Qx6%V9SMY#B?LX*2oL{nTWtm?^ceAhv1LC5sq?A4kMt(nd~!X#F`NOLtciGtg{! 
zETYU@a^)zG+=z4`3`ns81s^OU5W23{tJRuk2}*ll@*r|gi~N0Gi35_Y5);1| zJXO>Kz+}`6_4aN^P-@fWvL^lvRSpf~+Dk>CCW;}0Rr%wSVY1WIzE=kkkp&rD_2Vc}_nueXBn zoUCJ#>}XBi0>iILwb6NaG#`N#1+Lg8vVj@C)2%&vmrdL^9ZY((MM1gm;I?6vsi=`4 z*}8m;TGg{798g_|0=8+(L3Tuz?lHYJ6;J-NU_bh&^vO6l)aC!gh?@56jvNauGPK#R zS&E1gV0I}yUcmD|PvzWCd5D;LpFFC~Q=&{DrLW_;!)ez2rR3GLo=2U99!9T4u=U!& zFP~AVs>Yy5WriJB46VO__YzNw0?E?TsN9!62ATmnsH-)yeJ$jHx~3bFEez!38_E^B zdYfeC-Fi2fH2e>cBjI;x1qM~kx3Pw@L>pE-aCJ>QQk#T4cOpqna)^Gi28hxi2m&S{ zp&3kf+yhGyxhx#y0jB=ZRx^YK)mqBJ?44zL(X)ZkAID+h4NcwB(BHr67GEypv)GbW z$^cM6YyGCgPzx+$)Gj)0u9`Qiqhl)%B!GfUhk9;c_54_2G6Xdq+-e@=dD+2!x+7nVGCTQeq3PqRY^Dvm3X@pzKIl6Sd9V~ND_nV<+;<7`1i(_tglk#3@#|3+Z- zATq2RO$i;#BEy10O!v%yBCJ6zO>mNoSdcm=d>Uob!T>!9-4Kk8in7J8NSwu`50V5< z9$Ue4!rVhlEo-gaEC!m?U_Fs@fTkcUo*Y~l+I##*+{zMKJQ4){P4!(OoPHOkDDejw zVwF+YT3=X4!e=t-a%2#Y1tg<9grLQqUX{WS{N`oVuVIe=1Y=HtqnBoIBRV0dTt>57 zR!LJwc;7ilCmJsh&-`d`aXC9e_|gE{%WjtoEH~ZRu&|&TWyawuvK8wfI^3$rQ0zdo zE*B}fCc)Yd#-Uj57n69N4*459$N>A6pO`$6fCk=*;UKb0&T|FctY(jK<;7mQ`y5BituTEe-s8WB72`B zyH!*{rv>QQW~ICGUb<<7;30fRf;}F&LjXtZT}N-4%P}rB^U}%0Pzo?@>WsqVs*cP% z9C>qBa4N4=Ts1FPk>|@N>?hz}a};=@RS>;%Pv=FQa(2bTH30-QrHe}uet2+4g|>_6 zTctV3AEh5<_MlfD0}9+=OLVUsV$_y(Qk=QJzhDRM7;(S^O}T%6`!G_D5yowE0z8g>Q%9g$p&%r6&&m26J_grR~I7ggtqXEB# zxmLj-WIwDCF<*d@>G=wvmfu!EXx_h(8i01d>~&P_R1gB$h#D{kN6kyeeb#waUOiq7mU(w26z#pIeAYK$!F+x%7a6>h&(8(r#RB6=hFU}6q1 z=(@1{`Nzh$&syBw)uamd+B_r6Og|g3~5-AjZDFWd-&2{iE|&cOM)}NMiDQ z24S9B>Oim#8KGOG^s{Q`^|v~=ghhabS#Ytv_GCYFQ>j&MDY3i-P@XsM z`XZ?W*!Q91PuSbBo6-A3A6``0%#YX}KKy zEw&A7)ylBCZO8$)*9`)w55<$G7gx5oY+VSt?*d+Rf-^XqU3Wu&VF`7Gb?UdB=;V=I zK4<|$gkSY5;reuw;eY3p|3$x66Y(G`D+7@BtNx%(;M`H{(2sj7e_`XP)#;<)x5%*0 zx+HT*)k@9uv;Dk))+OH~{B9k+Km)gfiMm#4?HCGk4sW$jf$Z7wthr2bkMJP{sae@7%{s1LZ3#98>m&NtkNZbH$oZt?8fKAB1qJu0r$7bUq+(3Q z`$~&tjW7!Djv{RJpwD6`_Q_^bRHRsxb1#>#ppNIAoY+`A5^Vh6UUBuw|4piqh7-OF z;f3Fep0fq>pZ01>mNO%(q2e@F%^=YNsx2R~J4`?YCOq;tO?@XE2?BGqn@0+Rt$Py-oPVta%NFNgDY)<_uQyfo|9Et*D~N+8XTw2`SI& zy{H&aIpjY%eZOYtNuW5(Ep_YD+42!MWuOZ-6}0B9eNw>L6;vEench$nhAemPhR&3$| zNxq7E!9Ef-x`g! zb~$aVpjS@=pl!3eN9gt2wGU(Wm&;lvE-LN}^+I7F@9Ro*=YI^B)r#&R0gE1>b|I|v zuNJB&{O12BOIGTY9O!r&G^scpR(+0Mnfbch&$i@;5M^yCAs;_<5ptS&hK zkrpBO9Y%bVF5V~fx}kvK!>A0HER~B_P_;QN1p@j)ZQd`b5ez;xrnYQ3{tdQ@uR#Nj zx_t^j9tAdMO4WxD@XsJO!#{gpY=2GO2;mbqU2bL|QNd5x@l+6t&TV7p#7 zzNG1=6C;BxvM5%HmXowva=ghp0%({oII0QEDKb$?p|$EHgZT$en7#X9dY&xiT;L95 z#yLu;ZNy}~fQHh}uj{&tM7WiF+L;JfRV)fA_~!E0#kQ5JOg68d;47A6*2%iYs=Y;I z_)hV31sg<36+jLJAh@lQxD?Hv_VnUA8;;j99OaeZSzE;mFX6=P78N6-J4%Dr2A1N{ z$b8C=|3T|S48}oTJ5+w(L6uD0<7259-VN4relbJt&H43Yg!qj)4wp zFFjySHT05>!wNim4~(@CN=&X(d{JV}LFlXM+T3E`Lwn>4XP-YJ#MyjBlhs{&7-eHk zOTSNk7{w1_btey>i5Ixp-GkrzJ$~^1qLFm%)Q#-;RVu=@(Fr7g{ zv3+2&yT;H>8%77ZNNN`K9+E4X2WPmnN2iffvpHur#X=64 z{w@UFv#I(@EWMY#(0qK^9{D+@t+yi(47LwgaNd1Vvm3_mSD$OdJmRqy1*fATwL~M! zCzWv0hgP&=^HB9}Rasd{Mp;SM2+HO_e+>Zf95Y-eiNK4@u`6Q_>!u_Iv? 
zU70InB2X)(Me*S@-1s*H8{}RNNt@}T?tAf|%iWYz12^eRr~hKr;exa=UnBjmR{FlGT|20#(=yUb8Rc zy(2dwIKLTRB|t@H^sbuG{C9eAqr|?__LdVL^PCG2X-&R5ZC0RfT2`i76Pb+@BL)8t z(lA37`gpyqU?0uNl(I2!^54Is351Xx2(N)xSi$;~DAmRfC?wd@g?>^F{Omo!!EGHI z!f%E!IyIE4@D?Oj6Q7+GgM7h;bjGOBH2hm=>Gph+TUw?7pqpm(r?vi~HrYQ4XijUE zAWB{?@-;pts-1a>yWwX#FbX7oP2OAD0%R7`Y78%`;wwsH0~jE@89Jzg%I_{o*y~;6 z4opi)fP5H7==PB{ehT)}a4KkX7rP&Xk*M;D-m2;S^?$6)cul zGR>mQOeidw4q0hH6T?D!zHx@$l&& z7E3i2oP-6ao>sWnEHKvOJ6(D}pSw_(u~12gQX>Nca=Ac}k5A1OUle98)vd=4Lb{%V zF}LA^;VcK6LzmGHq?BkxTo>>Cq#(ySXk9Uj!u`+8t;Vk~q9`45DY@*x@Qw7i+m_|q zin970$`t0AhX>xh-iSg>qhs(8GNwv6Rz#la`G>vpNfwo4NRspz3MdNCTuEWhJ2xTP0`6UVcoM%zUx}^jYC7guCdJ z+(`1IQOvsyA;*ZTQt(nF!HuM9oSXl5vM6>oR+fRn^oTd=gF(3O{fhiw!~T~<3;UC6 z`k0ylWX@z+FVrrJ7UNfuPrOq)PojuO-VD&mbsB|tN4gOP65E!p|0-YBNshW66DsffR$Ut~3dT(+L=JRaIp}K{ zSH5}X<-ve*fGv#G)606OGZ+e%ZkW1LZ!>F37dh9pOihg-@}G`K8ekbpe2RYf_I7Y5-^!IihI1J7UY2Q`PWPXPJ=n;B z&@@M;uUW-@TfO6EQx*S5j%D1BJolaXhXw9LI@R~;PRt^E5-{7w4bUpQZ0_mKWg-5y z8uIx2gx^R1qho~`+z;nG8FflA+bIKOOr&RGGNoNB9eU+UwsSS(GGvwf_(pV2|GUK30QwPU z{Hdls{L}QddkG4=+ZG+qr02i$seuGs+3iNI$Ehm3d|KJ>4E%ZvIFv3*A;a!aXzgW3 zjbBpH2Vg@~Pi>%&uw~{gyQ!Cq7S>`{(z2=)j$4*{I zpsxS)UJPkuDoCsv2Kmxw>}CvySvZlC+ZR&cm5*N=Vjanfcy1sAZTG&-`HX(oZR#gE@GefF>S`Ji{+OX(VK&-sAmqNDs zJvq_#yvJ4Q&vMx?;J~ry*mvm9x29n)7`^zer|eKf;gWvEo(mcTpZ(77JK{^p4{<%; zca3v3;=3h2FlB-VEoT^p4$Z(~VKqbN>$tn1w{1xUR4gzB&xe1W<$Ed?cxUCjw5!M6 z-s7Pr>>=3cFg4j*3y1pXvu=65zWTeifOp_1PVyDlYSyIPu@y?*LEmHj`$n5h$A0OM z9kY@jo$EqmD}Tp;w+{YJ{&9>K7fxj$eJh+;Aj&w-QqQ$FA7#OTN6*H!DO~~6Me`uu zjRcYab^SgZYuylO+?WOKIP0~H6#jDOm0p^#94O)-&*b24GGrCmcW%}~Jk#*%etly5 zPwbc9U_q3|j(Xo7I_v379r$9x*8Nob`hf@KYr8AB$kLtU+5<^11>f5ckUea_+>xa? zs(>dfQB*Wzx`n$JpeCY2Z|L7D|1ym2-$?2Pe5V^RI_6ol2;sc;#eJK{bSXJ{GN?4@ zH$db!?@#Y{`u(?_+tF6yx?i&@g`zM@qutlfBi$;Z)ZqqV`{g$!Q-|K{?}wLzN#EW{ zfnNEIRo}P<#qbZ7>k>U;Vz;&NJleAjhbr57bT7FMq#kP+(dumE1A^JOIC%-!4P*U{ zn=>NbA2-gyx*|c`u)w%%?^1R;8@*eFYWribqYm$k=F22XW#@B?niWg{2~Pq7g;B|{==?S_ zq>^In`HZZyIOKc3Bn2;v%S)Q3VD%-LkNqj*RK3G`?%lq%l?X4t2S{_lau~mrSVN!? zq~poZJANy>caB0%KEvNV+clNzPv#S#tHM6tanUqH@iRjCmlMxji~>>=Yd(s`I<^ld z2CjsQoVOz*_mTrgbKFn6?`5g~i>Fr_Twb`I#l6L%Xoz;V>U1XX=X!uOXHSX~q6xLD z%>l8Dw+lUyEQT0bCJ8IP4$&Xay;E`xXz2+E1ao>6doOg~OC06$vj14nv>0N1u8~*~ zd+xJZxgcXMuw|6stZ|a?Lb-;XE8_96xZAFH+C2hxg?~NP=Z3>ufF|>TYW)Wgqu)v9 zs5?pOkAwpl7DwR~vgNaOv5>gw7qc>V3$|^UEj92*UY5n8pDqT)EUo5QY~z6W+CyZw zeL|B84iOD7;%TrGn?Zr~Ld^Jv^#ZnO7eg$Z- znfDG2TGDtg7~hxPO=$@1a|Dxw7)-&8`XN`$3OO3EM%r&M38NEIM`NA_YXpLU`&{@q zH}pdzY+T>Kd=&mK*+~ptAf%LuMdoYOZ@0@YZovEq0+%72EoU5W??^w6YvWIMIQ#yc zkzz=Y46wSdGn)b$$!&jz$g{|P#?9$1-CM3#d5`1T=S-W7p!ycEQ+?En7u~V514|R@Jy>8hNmYDu zNQCw5(C8bI<#TKPxj&Qji(Ww${_SO`)FXb#KNW~VF2VKj9Rl8Z|AVY_{wxha(2I?0 z`U`D$UnI8BD&_(qn@d051|dhLx5c$Q9Sea{~F zW{FkVzfSTW43}P|T_=iI^dD(ul^9Jk!?)?gUI6<>4J|4fUWBMG8`AfC_&7ju@-BrY zCHz#8nQyJ?BwSfPYGN&xDa@d`BCCJ{=vJ~jv-vAh^ls1v zqb7*6IXtyLU>clEbvuBJZn#PMS$b>UyspUU7m9rMVdkR(VYDyANwV?7ovzq)WefC7;wF0O)U!BN*YfJA#{D)CinMzjdxNWiyl0q6J z4tHz(4~IhllyP?ZN^+6haYkTU((*Q*yYc9EO7h0qeM%5yZD*yinr+z84lJuij;`JAvTVVA`uEPXD6hI!W0AjuDYSZ+q9Q_Tq-LCV-YS9I zeo-PI3lx$}(8Rg$SjWHvuFh6VX5~*Zz`2sDfJZT_r5Uw87Bvd2>5`^HQIK;SOVRR{ z&sRR=JbNS8?1ab!YmgBb*|T%MDff4nRjfs0Ductirj6M6zbekw6tH^VWLkEDB_&@j;A+kdz9~rhi{Pe6su-X3$DWG z|3Ksxg~5DvKa%8;I8Ehtc4H<%=nV+46$f=h3k6Hg8(n_HnQ>%q`Ub!!TQJQCF@FC^ z%MaGc3}WILCLgXFLDv{USedbMvy~T=X4gG?aR-|gGnbRALnKQt0OAd+EM-@iCAP#&V=(db`hFicxOth+^G z`?VjvZWh~T5NrPh3+Z#gG{=1tT`o5C&e0!io7D;emQ9P32S-OSzZ+ca+-9wWxxdBXq}JSoUT=Hqx42Oqa!VfT!e%7{6W ze2YrQm&e!AVg74OF7mw{Yyy=0q95f!QTh$K;W7_zQHvp-%|?|Vkz_v%Zb$ByPw5S? 
zjkHfCtTaAi@{p(fAN2CPsb#3|SX*(5lf@Ok6)dRHGdIPGQXd?YD{-AQ&LJtSiqXo4O7aT!5c%Xvo-!ApJ?9#~P4}DvvNKzI9S*-) z!(m;b8KPRLIw3$cUv0T4Q~%ihoEwM13e_7*UDL=OjJU=Li)A&EpEIP(iMPMnn(%W7 zViU>!;A`E;@&;Sl6?(X|XI1O70)pVVNKOIluex|O!xj_LzK+dKwTg%;XhR2FFq`Rt z*KH}$HLnFLK6|k#@hVJv++PL33<>XuDUGD>?~Gz%Tx|V8^(!1g%sQ*ie@i+)`?EKL zVa(U7)7RUI9@s&$z>p@n8dWBkWS}+}a6M>jC=epy1<=L0un` z#Y>|8-jy4+=^55LdO~AJLB9|UlVE;Ew>NyB97{YM)`wZ!i@7UVP`YDQ z`@D2`lz{j$>pdQ=%OUY7j-sLBl1()ivy~L;G3T|CT`3~O{%x*~njag6P z8h%*Ydc}K#HUl05DY$5YQKB~LN=n_QZ>8krXyJqH8KOj8(vA8U&aUYEAM5Bg?@EO) zxsjvpL_t@^wH|tNYFW8Q*vW!3fbX!_h^oxBAwZ2nO@~E48IAyVLLo+)tfSN=3E0<` z#JQq>12cBjC&vDy-}}Z%GE39+R%3%`r=Q5GkckowA zPf!2@wy#LmR$W27HUjMG_0Fc`CnzZXcu~PNHiVPPSYX|VF9+QxlW8vgY@T4x`V)MKEZ{($u$aF zdI@){D+=Go1?>|NzM9R4FB%nSBaXnn?oW3aY=D0Q`SYEM9!-w{+Aji#WRI)_2?Av? zi4-X(Ww1B0)ox)m<|hdjE)>_BLCC1NC^a@Icq?u{H?nsHSByvZfW*gpujb;pcII<` zTR8f{u84VQ@VgLs=3yJc)yf}B!tBs2OQ9Pn|Hk5p^{WbqT=!>n9QtZ9`3$_hz>1@^ z%@Cc70A%?fB@VWe#}gz@JdYcHxzO@oE9$M}+CQI^Ba?=SyiLLBYBvICv|JGK3qc;6 z2SC4i>#rMoi-3vgFRRhsO6G?|?ZY}if~i!33~B^3;IXy{!OF!|Frj~Cd0Kci$`o}W z@$_YW)3wI`L0PL{{w{L))gAFbY88P6Pq?>b@qZLuFicRQ1V`B z_hr&XgV0{D-XhUNhr zy;`7dGDGKxubuYVld=;j_zIAsJ-?_+(h5TPBOe%Y6UcqbC@5NxOK9ed_?9CnXWJ3W^ zOe}YkTZ^`Tln0A2i3ZzP5Ee&iX6IjB8xgUvjuN})hbbT_nDH;>meGMyar(8^uleXQ zK3n#_@(Uh{=i*n>(xRXB6M1HYFxJ89bv+C3ivxYgfI9*5v-s3ye}336LnSFTdcJp7 zGVnHFtVr8U!*h||A)M~`zzxhTE~*^yNLot;0)UJ{@(mu5j^VG9jhm#lXBg>yzBw44 zyArYRX=me9+O`ey@oj~aG~*k!Xjx^uey>95mYb#t4MhBE#-hd+gKu&80FSEPWR?LPpR) z%aFaQBAy;TmYcGL$?cbPcUlt@i0tu%52eZ&2;UWt`kue=p2L>~k`w^zL=$_x#cH#_ zNE_Mi%id_Y#BA5sL=K#lPAcnb{ZZBrg;oPESe6}uar5j8m8}oo*lHc$4$rr1mOJ=0 ze!!pQ(?%bUB5gLj(dA(xvaA-~sH&NZXXtn(Q$1l5Uztn&4E)sAD;>&Z6sZGE{P}nd za8wgqNdn#c-*CeUUON4wy{%b0G_n2@;E=RUXI1>IL}U#glf}gKPZ7)y|D1vdTz%&_ z`O73-SuM?K(}OOORXrAnA|5P@wekva)Y$Et2Hh7W4@eg zHP$Ehj}-J;awN!FjDh;0@S!4Cjf`6|ns?_Eq^IZ})2^hNoAzbf;)upt^tja|5J=9@ zbb}eZ0agJl-b`PqcU3d$!%w0C{H(zv{-Pq}+k@Y>F{&`C$o0L@Ps9vsRD^C=9`_t+ zpM(~*M54RDN4B&}YO+>A+UK;~#NmtVsj%fIE*d)e7utMa07qksN-&A+lkWZf$D4`I z^hd7xQG)6BAYfvdbhZsLJo7MQ>dx*EtMUUc^%dXlYj40LWZ-n7gbDk9hYgm)?r2({ zhYXQDAf{5R8#Xg+$e$K@WL-LDZd@@|3(R$Fn4=VZ>Ml>afb{sO?gBGWI>-$1%{fNu zRN8OL0?(MDx}{0rwe1qgUz$Zr2^C)yaeq39n!@%-K~lVz&h|wf-aC^D0@9Iwd?HD} zn&y_3y%w&*fL%$*49%h+J-qraFkLc`!23ii=0{P;v|%Wlk0gl^%A0T#wEe6|-ICGb zCA_vCzQ|$tgTkcC&5WBXO4-(Jy12S#b%-8CN&wRksYX07nWypzCV>4h8~w;QkweGL zOf78^k#MU`Y|utHdNdG!nptF(FXZ4wGf4NWnFq%&d;1H*igMC|&O3+SLj8rQta7{s znzm`t019_wWT(4u7qd8NTC|$%!W&}p(N1un}dG6u$51d6=3KxhH zO^zNY{sC^`it6JTbu+Snn7+8efK1q{*r$sy3cI3sucOl>xC zgHyT7&lp0{q)<}%2{Wt*?9KvSIlM^hbNX_aS&oD=sUa_=&GygK=*pd>2C-Fs+ zR>ZiO8^8Q~*cCn-^3Qf-^<=nyZu>Q%8Mh;4C&s2$bCkZ~36v{6Dw}(^5Ey*VJ!@6> znTOgUIQ4t?K)Z|h``rSsAF)aMtXvD}x0uV;?LqnpBnzjSk8-P(qeixea0Q=ueMpMf z5pT$1%b=ILrv2QeO8j5fb`&p#afv;pXZdM7?nhCpe&%&WLnXBnN-pa~{Hgd>19rdE zJ*%Cv@aL(WHNJeq3jdT}#cg#Cwa;Y}n6@#i&UKp>B$_m@g;X~HCc$$;5$#+=5C?6U z5U&s}Mts=3x=uV(ipDZmTKOr@b;UnZ$kl_sU2~(J zpj4l<9xLu1DTF?a483bF|KIbRksQoC`$8HkN29b?$9b~!YnByzc35`90(H)qEnWH^ z84EE&2_bELsz%*GdiuSnVurn)tFtv^eG^aEXKMec=!Qo&NY@|Rre*qg%EeISP zt7;5;`ztm8uer%*hFD>rl8JCKh z{1oF5vT8rdoIC1OhIg)VJJNn>0uRjB!d8{_&syPbf*8IB%JS`QDfuSK1;)v)kYD*z+n&E!``v0A~LX_G;vg(O8;zD648 zk_&?}1Du!|mRtE^!@AHK@1OyPMYv3X0XX$m=PaK88b-sR2Em+gj&xB=Vmh?RV zGIkTna;^6NP$(QLcya)Qkggj1{^aZuU24GBTdlQ$9|2(Ea&(AxUS&YHAVcxp?kT3d zv)2GA!Dq2$afNq?lbXUN!1@;GQKj}Mdz0aVl$y<7R`-HAN*RQ2p(qUS`1e-Cj=J1` zskMO*XBYq+NM7}%k{B}+5ct~?o{V754SSFX8TachE^Z(Lyi!FZc{@xJ@hmmG(Q0LD*dMe>krnL99>qu;mchM81&+O4OZ7o=W&{4yxR0df5uV`fu_;+ zooH*SsxZU?RPMYk!$Y}?K=LwLhLm#G_@P;LNY|I~o_52RL1Pj83_`8+5Xo15wLY^C 
zWN*6AHIW0^1U5xBeU=_y?Q33<6!_)|vtthA?hVpWx=&+=VfI$N<6@xo=QCXL>JwNT z#z5@c>(J}xGe+bY_d)=wQMXFQfC;h3aY1l0zLf!czzaqZ0~JOmO)rfyt@uP46&L}c z@GKwU6V`o1tEV^;ahaVe`~+n3Kz9oYxojR_gLXE`(?wS%){;?fI?6p{;%n1Ku^-6CCKXd>VUS42e z1D$l`g3}rudaU`AB`7k<*UOm|&DekCCTq5-GyXK%(d)w^RER6@oc5K7&pfhP%>WnP z|kQwfW&K*);U2XG9EXO z)LHT-rs(G$wSDg%h;Lu^S7bI_Fq;y*zrY%M^HaLB`s{s@y?s*$_lC_o?V}y-S5t(2=$% zQNAnjYifA$6XehH+DlhohyuGGAbd5bd+@Ib#cKViE3DusYc2qYJ}?q+Ce>-7jg5*j z#8Ter*Q7TgaXxvK^Z>r0G4T`o@%H!R>FrGuJ2*QgaE*E8eUdhwtzV zJyo|gQq|#($nJf(Z~OT(yZlBBkp4_7h>@xfQ)#PSW8$-H^?jGnhojsw#7vK zl%Ed$y2>+TeI=WJgRTTxE_%dxRrxIV6}l`d`gIt7F}o_7#$R;Tf>rRu4B0p|VBeQm z-6j-PbvB!wvE*h?GgyDbO5b4Lp-)vN~yPUmhsMmHcic~qLfDzFB9_sZrcN}LHq5Ujcntw{nhtZ%)-ZVV`Eq9)K|b1 z7I_SYWgUzwo*QqBQh=xm{AW<&`XvSD#`9*--v<7#MMjH4#>Xy1nd(cl?6uShT$zB1 z$lloF2lex%aCbY8`UG8nK3FVj#HgSgZ`Qe3{nNblHZSn1Wg<3gQv+;I<>oh^m$fk~ zZcp=bI!z1(;H|OKF0MXKq*SRK%k^4eVeTHFn}F+g*edf}Ydb9CvDl+AV=x104t8c= zTIKq#%{P~d%p^uj3iKUNkbJ2o0=9(alY5rTk2y(YM@Jy6oF?-9(jr?4*=vLowR&YF zvnz_l6BgdQQ`Mt1KoZ1A+n@7%Iw0Gx5{F0ueSvs<(HOh%0;SpMelyqSx&GooXE!i- zc-9U4ibe4*SK8;L*`vviKOMX?%t%}@ZL{+;1i5or*|OUvqx7z6Q~uV2p6bup z4}0_)5aLw`jV*D_>Tv=FxBDOaKXoAO)k>BCzpcXGb(!QgR;e1T>~>HP7piNvm=pvi zchwwH;9EG4ClNY5Q$G2Sz-H}b{>HVXjG($(0h(_>Cd|8^73^`m*VDOiD>&MUwsq+& zV>&v%Cj}=`(AtB0%&mnQ9BoNGxeomgCDen{k>BwgJyx4_oX@h-MjC7%W?r(`igu1h zQSG6A+KXqD3iC?T@2v)N2OPX_Ykem7G~gSZ>n@&c#wh&x4SaPsS&i+KvF)4I7$k>( z+5{jC4WLVXYE~i>9C~X|sVaY#HzEOMhokjU@>6<5-iE-Il2L)X)4B?7CIs+hSRTSV zJKC*%-VQ29Ug96|e*njz7C~Ud05d>@P7m-6)Z^87Sg1Lww~_G#OsFsn&w(SomPS55 z75Du|=}G+iOP$OVL6~nkBwBR4Exe|Ms>xHibdAeN>yCJcW~&+E5zOq?u<20#`%|5v zwS>1_vtVc}t}mNX_!1$WXrS49f2@ZuRq4JIyq02ZDSrvBLt2cm8bipiMVLkWH06Tv z@AcG%Z+*gi^WR%8^kR=x97MQ5U?yx2@klUkJh=8u#oNuoiGGmpCI|h44LtIH(3?-Z zHehkYj*`gZh(qGgKP3OdF9NAG$HJAI_k|?4YL*?MOD4!?DN4QLL@*zM6j|iQt}vl4 zD2Tm$V|V9RdA?BPq9C~3DoZOoGndE;SL6BLOD4AEjR`zdbN!X{2D;>ThTv)lm+>=@ zWX5k&!%`*pb`h_Af=DzVF=OTI48g_7pL$TD-{nPPa#}JPe8^Aa#N_<~qr(13lZOHW zMx>_D>N>vx-mm)CXphMn6isyL$M8*3Lcrg6GPLx}>z{pF%KXftMcHI*~nLTPmqp!}mBx zC+`(a+1AXGAXC0NJZI^?=;La~+}|X#vb7pv9$xSRGh9_C4?FMJv}&EmmiqFT;$iIgtu@2*iT(o6+cKA zmM>k`vX{o#UN)nz3{;=aq-sj46^r(+_7&DQu+9S%*q`5eIDNfFQ(sS1bTLC_pWh7u z)oXy?~Ifa}7t20^3YhbwmLr^$%soDLBsqCQ)#Hpz^-Mjw=}^gnyk z5;`LAj}$lEL1P+`e&S2#x6ue(0_|v$?~E4=$zGo;SOO9eM2h-fUQe=39mY}+ z&|OK)~fC1u3+ZehU^yM@nG75OSJ#{v#I*-I^0%f<4c{^%L}4Bl0&Dz1Q}+bO5gfO5-`;X9V}P)5v1Q%;=#TBq1XPgo%2$- zl=tpQ@lUwODaq786+vI6;GS-99jc%`Y2)Ke)_9r>o!CYn@+Z(?S2Zw7n<=)V$IBe~ zSALtp+D@9q&_B!E-8mnM1pBejor8|*)-TmNaX^3Tm)@k}X(LRqyLfotI&lSq3_+7) z3v~J<4tNFp^MAlztXXAqZST_tZh6-mE5Oh|(mX@tW4DxGgKLILo|o%(9jH(9??XC` z(&)~;d}EDwo&1n%#9s7N(pw%VJYadgpKe;f+#ZFXnSIZTSeuAk2qpp5#^IW>ymgYF z^wu*i!*<>d>H>wK{*(Nrv~~GWwX=t>yzdq4b0gtLNEaeg)tKk^`tYplIO_HNoCfak z5ihRCB+ghcwB+5_<$gi8_M4@C8y)i+({C5Vp-BVTXETJH5ld_@7%8@Vq#I19xFgmK z=KMmQ3F}|A6uzh8cP;c#WR8vwi+`VM%ae#_z6mAhlwTGO-D5g^yy#3v*Ks0mK*dIV zvs}ZgK+AWc5_xyAN3ROB56{8ublx-z$5_xqVJ`6!>^!7gX(4cR+D^XCWY$*-JlSga zxJyrFxf%v3zJt6yo8YULZi>+2>3et0zto~G$9T3z$=omvH=}VdaL5vJMLPFg^B$bd zpBai2QewM+7$*w&@aoiO(F9O_ls1S5snigSs@GlU0`WXmT=eL8r{qw&Dlznpwr(26 zy^au&NRntj_0_zAyj-d7Qrb{%q)L*3jj95r$IO!4Qp$B^6iGsP;*A5bsIf(F2EmzR z42J&P^KoMQ7RVs{>LM&k8KeSdx+QrrtB`i+-dyC94swnpuX)0TF#mlU5q4W;f2aTM zs#Y+jE13Yqo(GI1UN>3w8-5IBK+3zY>l)cEgXa#8pTR-HR57vf%J_k0jqS_RnqZ0J zi25|#-ou#k%vbt5ruUoPxYV~nQ^0lRj&3y-#%j5)M%$D;VTf^z-BD+R663nPX zZ}p!kIxke({nO@hAnOd4yjO0#w$Wq?WBwGq&*TL&tRP4PyvUoNS(RZ{^vCer0h4$O z0u9_$Y+W4L9cXwV?q^ASs*AYaCEsC?X<2l`PG!-5JSU-YX+UN57;FzYOl4)%{Q?xz za)XLQ!&X9binFwj$qRS9-XpJ2TM^iy!kXac&iHqlR2&Ig<86vi{Z{yki6zMLO^L)mAmOi%Mx-E`E z?luy|uO9@k=RJMJ@S}81=QGsV-3EN;PuWuJao3k`+q;5hYN>1SpNv)~R9V>O$Pz7$ 
zE8B3{i`ZRrcLblJ{FDX$tBrvex&ATx-H=F&O3G^3C_;ncLg=cYx_&4UZr$s4b1jBU z`O6bNmm8vb!Z^I!zQ-x12AASi0R+Z=E;M+D;m<(sL#ok-RMY~xlLVAU*q*OFAvAIJ z#eM=sX#>NsO)`N(tCN3;Sp|07rRhD+&VN%vN!7UyC@ z<#QqVQpK}qS$@t<%Uq4)M@W9Pm8L34?fk5I4?Sk?AMM9hoo0Aqcm_%peHA+c;pP)6 zB>M+BKL(m1(D*5q)+d?<#aD8spL@9qUwC?GkS>3-&GjfMgYN-n-rS(bYhTJS0e>LGGh z*@UEp{8?rlwDz*%a{>8MXaxYhaWNiXG#cc7A0qZ~JWt|1SvTodtS3x#uJQB)LH{os z{DM)*ZBgTv`N>;HtwI;^98$#6n7nVCO)o7i#4CkWxOEOTdpuzuh{YWkNFQP=1R$NX zdg*}hsq9}!unA2G+DC_|O3CF3QvkF*xJ{s)by{?jtVO8tz3Wp}EmGx{h>KXu{3n*Hk>TBp=`lRX2;GUx_5wzaRc4lm_6$LdtJ59*QuWjEtww z$QtasrZv}4ApJVimM4nQ4}Bu)uhvp;VUqdC*`O4&IKTJT!`fy`ZDJ6FGF*}O?==VW zEB>3eACWSH+Q*lPBOYZ`vU9Yh~qzN4hEy#`W+ntWW$p!Rm@QpOfnLD&j%<1H6dE3r(A#m@h^EX`*Y#l!-VE+FSe?T%90hQh8S zHdBz1q4+VR&*hEQ_vKv3yu}ta>e0*vJ9AQ}P70sDA|U-0ieU@j0~VCM`yYH;1hd}Z zxN8mDVfR2*9($%d*33VeaFHcXIeLM3=8ESq9=9cpMZ0S3gJ}*DDQKWow86C!WxL3H zu$YGDvEuIS-?>mF75+T2K0Aaui60K47Q2KjY-^7v zY%_R;IYDR~>skL9g5^gjHZynPkc<5k_NjdC&k9|VYHf`rE2 zQfgYt_K&hTWn(z0LZB-L3hyU9PU}JN|DGgt$I>%`^j#=RI7nl)T7B$QtR>MZ8U)?5 z*B(83=+dIG7?Dn(AcQQScaY}a&qrFWUrsi4^|Zx9G|b@Lt=TNJu+=cD)bPV14-F#j z%L0+<#h>HAg`&8fWU4Um8Hnn38j)BS`u8tO27I;XUYS@Rp4B^^)hywCRIB_sY`t(V zs>MdQ`Sbl!Sb~uZ5*9dusIv3;RMOtHuGr$piRb#jD=bae7bc1|iN|EY-DMPHtM zEDf;2vPBoaUV5UQX@a_!5U4Ahm<_Q?&@JJaZn5p#I)(Qdh)r4{)A23HmqAO<1-E9K@ha_n3)wFHnFpR+ zUarp3*HmeXxKxG<<3t|ACYrrjH1nOZ6c_UvA!UNX6hA2HUfDD87CrrIuPjghV^_c7 zc(&q-@wVq9HFh)uBo6#|o9Thy=%y@)MPBM0cS?}(VpA$}mv3uW1}F~xUm<^$s`O;f5d z1Isk6lKSN3_0k?kRbMbp&XId{L{M6c>dyBkIHeN)(`PgoGz2N;)PO>L)_w)kGKvhr zfg~jc`mQMP0SvL7eDD2ZLzQRswRLAR;YmO%9W+MR>sx<0Ao}gXnflStYfk*DtC4s% zDx|0WcOTALT4p0O_f=^A-+1kKkD#OTaPbc&uiF`)b~~$kCD05e_>)XHDYltyJCiyn z*U-%g>wYWk4dq*f3`qGN74Dj_f3G2mO^qV)(Nc5Jty#JFA3K!MQAp z!4!sZWzls5M$;hPA;p5hgij*Zvag%tITakki#T&%af!uR=EDS}V5u+Xou+U-m{6UUZF|AqqbUQf^8z%IwigB+;!WXp;ePqP9YJTWDV3V%Q|Yp98a+>z4L` zcxEhqsxd?o@!$RO_qq2ZK-k;qbpYt5fjt&Rv-lqrhga2z0b?a(S2aHC3Dk${Idw_W zw*2Hg^-&uCH$N36CD%0fyuFIetPBQLPt6nI#50^yv)^XFsRi1|>2xGgqkF)KAQ?U2 zv=kgZf1gG&^TiE!AsJvRd9;A-yCJ{=9lnS-nDE4xKT|}!JO`uc+~$j9SvuMnuhLUW zC*!AWK!^jY|5J0}3vEF#gR=7Wfi)usSoF#ZDL6-KucW%Ef5(trO?0*^GQ&rqZe1zM}BIcZa#DM z2~Bid-)FiiJI>hbWWO!7W6+L@Bh>`)*bQ#fLd~{C(eI`(=waavv!TP9q+*_gp@EH9 z$^1n#xFLOZSAGd;G8i%PsHxZdxlgkI_zF1zwjf7}Ion9ks$D^Q@LPxaY^ZE~qls9d<2?sWk2{|pY*r6@&|uMd)ZqT8 zk^c@5K?e~gjZiz18-3IVMmOGfo+@TJ#Px4bfNnk|rQ3(A0ycu|FS>M(_~ndj$=0YUlb)M9aU0=0&MJcYje z+B5h_L+1By#-y7H?t!AI0rNm-O*8V-{d2o4E8=^Fa&L04YO)GNj0`SqBZDK8ZXdy{ zgEFBo(TbBQ;nlnC;JQ~cP|C{U6+I%3m?tzSBU_UOET33(1H2uaa=p1cAi}~ROmwdR z+-Wf<6kvJ2S;@dfi zQe(7jEJ!qz^z0qMQfSqxwVh*;Cj%e3TJ7*k3UguX)D1k#6imxQ{R%Gvy_`}dXd~NE zZ0z3z(t~MQ()$~Ghnf&K5yST}Ox6vz@GgdS9qpPBg-NxJzU^5SXuRHRwPpIZ@=o_Y z((+~9On~&(o2e4_va~%?jXni$lE^}?wQDb3E zPOHH1Mtxdj5wtBH|7AQAyRg3DC9le1IIJKAm-em7nC9};#eT{UDciptG`kW!thKaw z28zsBO0FK1{EnFTRwPyR_vJq#I&EJRz155kIV|Vq*N&qwr)464DL1WMx`!}ga&mS$ z-l_Ns1$mc3|M!MVyHFW74~#C=?v1(`!eoxiH!rDJfd`ACBkg&@#;Qk63@x-5EeNwW zT*!tyisMA?>#Oa5*_3nK;(~t8L;Ik3Y_5QD<3 z0^ABLx-$e64_hd7$``O2VJpRqLoK~nUL*pb3a}^7FSjrLz;?VKc}U_mlfocZsh!p1 zppuLWopffWAR61dV?HM&!$y`=Isy>F_|5bd{=sKGkat^9sQz+>9)1{H? 
zG$Ftj+^a&`)27gJYfonEjO)?LS}5zQFzcp6!Ps-CyMK-84_?S!Mc{9d#{TPMM&7@ z|9$pQeS=x&`xH=^=m$9L=E4JW!WDCOk;)#q3D7G@mZoP+WnZRtewIcwp1nLH zYvZLEjb>IHw3Pk>n8Wnh>v-1qqF9XrcwJ$!{6R9aIgp{_YYfp8o!{~Y6T;eGqYQ8N zeFqs)*~^$FVdg6Xy%%#A!zZCz%)Iz6RsVM&FGu>+M;t&GrAF*K%k?Pc$L6|V2ERl( zi2a+ICpB2>`x#RI2UQpD^M+VH^PPmboHvA+4j(&NKTbCf7Bu511bbYow4KvpXD5#X zjY#xI`u9c==_-q}v1);v3q?6xaf+mM3&kyn!r-%Jodr<`d_X}k8{_%1vt`LDZ+e9b z#UbwXZ0ni&hSj`DS)8-4EtbybSR!xbL8FdE!Cm6-TnNLw!lLOCzf8Dtm>b+ zT&Z8M>}6hSwM4uC>{fy!EK3qb$$?4s=|W1Uvec8#L9=%<6&re`=JmrY&tAFtgq}b} z@SD-*Rn*u9W%S|cH*?C6mJXj_Y=7oR+Sl=(@auJ1najc~b2}!Q-dLT-U%a#t@w;`t`G16(vEDv#TJ4J&I+q|u z7S3yMJYO*NrDNBn2Vbx2I~i4^qWFK;{Ov2tP^RbYV$6vxJ5(S9|7j>FcdX@;(lk>h zJeO}3fAQbMESpoQPJt1acYf%LJsQ7S5+V>6F$sHM;fm5x3YM1qPoY8J>G=;Sh_q~% zYaw97RiAl$v1cp#SF(S!&DTfyeapQ^H~xiPQ|NTG|MPnA(CV3=w{+S_wCC}c5y=E7 zmIwAW}7o9sd|ichv<3zdvgRwc@ar278epWi>w7_xtsJ zUDx%z?z_G3cHDcLVzzD?aNPCg?y(N2wxbm zo`eROj*3D%9}Erhw;Eyxq*R{}iQ?U*p|Cs5J8NzaeSKD4dwbS)p97u;fo?4LT*`y9 zIJUaKuD4(7Gk^imcYv_Z&-M7F-p4BIx&*(gogi~P`7&3SgWX)oTppudhJy!|?h=?E z7O<689eW`KqoV8zE*vjT(p#Ln00s4GuWY_)R5rZnxv&@u>JT_bt1xY9X!fa8gTR+- ztHNu};eee2i*Ae5E=+%~oKkU3%_UnECil*hA?RqDLo>QR-c-`|)+bh!V|w2vPN`VO_sP^= z&L#OerG3zZ=|9iIwM$vy>Qqn3u=o^9E!FFt# zn@pY&M!?b;RxA-S;@mC9@&Aa!s&euugumPX%pRgp@K2_uv)7g-k?+L%oqk~bX$#PV zXYasMnWZsKUaL8%u2h%fq!e%xK9{^ENt{SK2^TDjGyZk=Oy6@?Fd@Q5bpE@Q< z_@IwUQRfHS%wudaN^8U@)DPx%clSz}4;$oV$8iA^V&JV;5l*MQe<@h%FaUv3D{p-< zz*78cXzwrEU6W_C5jS4Mg2j!pLGmLzm~#vcZ(&5F44^6 z%iJz9VQH=~9~F@BlXT%$rD#ZZ<>J4Yr>fa^=BcDGShBm({s%0Qa|Nf2sXL1s@(coL zb{Xt`Fo|eCW$(V5I+nRxPhzi*dzl_RlDL)jNc}x+h~!>Wa!0`Z8CFYjZZ2N#dEOBQqh(3x-LLbAMzQu zYgcA@UebXIIDp6@k=zYN{}6df0({zo>Y`idg`-O)YzUk~4(K&mgDZHa@LzLXc`W9^ zioj&ew!AS=^Sj9pu9@mkB$n7?&asUU_ylt`T!4l2Zq2_Gc~=>mg#z0KrvmKkM`_-I z=o0h7DuNB6gHmS!3(GtDQm+|qA>Fyi5cx3Q-TM(aO%k*33H+P@yG z#XOI}eGR|8C{nU1v5wm_MS zTLgqWOMbumpI6%8Z2wSneVyqN67&I=G6YmCpLxjtKL|6d5imE=OpA4Y|6F?&bhr7q zvIdL*@@CAnzVePsDwC$l!#vIby8p(JxX{sUsp5RLX*(;M>6L}4k;xsWS>woIep`Eh z>Ej9y%N8z$*tFcbP3Xm+xAxM+4K#l7ThfVT_Kab~^r=FP*Ru=?&17P>Q7+VzLMZT^ zE$`jn)peJny4Ky6de!fpte`Rpc1_YHdt+(o0Jr+RwlYBk+2Q&kg9rLtMCM!7(2Cut zZ_cmhs6_NG&Vxy-6mD2BiF1;EjI(=wrx>0*@6L~y1J@d8M*Q^pr=`P(a&^SXd#xRl zL!b0Y8Ih>8#rD|vbjGUm1=9$2Jq0IfbeAO@u~wVqc^&_OsDVGKYrwHK^FZ6#8MB}b zYqZ5A7)~6&a7rxwWlzm^TV%M6=C*srjSX-|kzFL&wQ;wy@TWgj_*Q9aeQBIHuQ7}Q z3fI2+qH&2LsI01bM&uf*OpC*9f%r>OhqdWIOUXj4Xe8pX2qI3DZG&R;3J9}T2r9p2Jz;w2z;f)hDv<1RYj9bRK zMQfo3vyp#*Zb%j;V_Xt$+m`6Dz$fxRR82!Pr94lo`*AG8^=Dl`1q<+xrsZx1oif)? 
zI_U|kdvls=z=pX^+bsaE`?i#hqi&&z#QCCt;R~Ifn!WLSoGApc#sO#Q5^ayy`*7_5 zrEyd(Y579=z&Qk<#|UAqE$V#1nsc0!yNeo_eX-H?JBi+K-cxzb$Em^e7M)eMrgXLS zn0{A3eD@zE-BjSWe4ke0{J7$60Q2{&d?s(?C)f&QaiERjI##%63P%aSJX{t8dP!J~ z-Z2B}JYF_X`@YN|Py@3Voj)mi#`*qrIfHgPUZi<)br2HgS@V++Ov;QUC#~`)@j#S6 zD?J>MJb3+MS>U~ml;EF6dMRZr*Vy!xxSu0N{0eH(R5j#^i5j16=spnivMlf$BnfHanv)bZ zuA)&YH14d#MrQmEI0 zUzMcZfdykYw0tHGaMjZL3fF2a(U65OD3K`IHCDC)0Y#p}PkK`;lCWaq_4r3xu!UTK z2tjAz{SrVDG5)C`M$#c1@Sdsh12!F?B0pt-9-m;JYLIRZz=@Hi3?<0zRi@ig7m$4w zo9P{;@b_ZN%n+pE+w}Kxy;9Ety`nO}dUN)xZP(SNz6kvIaY5rjjddmXcyf{gW?P2i zjCZ6=_V3){iXD-yuDw=J%FNl()w8^w`{nd?@^p5W5!(_Rux{Eb zC(|kPLYj(}!=DGmJ6g`SY%^L`!tb1cp7T5)-|}#xRb#%{Rg!f2j8WrhfXj8cgO#+_`G}7C&nzkxMNFxcXbVL2rpJC7mdC2r!7RRr9Y)F;(SUa{A8AP z257_JE89g>Sf}|c<(KWRv+b1dzz!xHU)(P+Q{mDQe+&h3rt?J2otE~vH=1rH831x9 zG%hZNd)}6cL0i_zYd*XMq=gRVMSLmUUQVkuXWp9aOL`^~?Re@ml1}tA8=o`-9#5;e zHlLtt|7MbzE0R{qs^R4*djt=@#QzFk&b8QNMm-ufmxQ(F)r*zixc&g=g`co@5MWRo z-3`g?Tv8&OG8|tolqyZ@%jiPgh>C1a4HV=W{n;x3I?^QCGL804&j<^c3>uJ>>~Q>t z+l7#$>oP9*Tv*R=jI_ak+))IM8fA%mJuUsNvbSSODA&$@X&lXqD-P)e9&vPaPN|gV zYGIxBaF^cmvR*V`6ct{5LwBEr*$w&2HlR`UMEP^K?lnAsDwx8}n-g>B-M^J|Nm(10 z78L8XwqrmZ>B8;=@=M6Z73%8zQO!DAUJy^O>wC4^pWs(6Iir;y&7~z{KihU@*07i> zWmPvqj`sAY!~iNE$L8yxKWa(}YZ?X0D+gXMr1>_Po=9i7pE8`gh)n#8C7^Oy_L3|z z$?H)pGp({5CdmW z&%H^7Ua;H?@_v*$TK2D{k&D&hcHXyfockjb_yI0zqB2)8+9g$bwcKJKgyGHdOO|xj zkz@@mp$e}mqcgYfTP4tos%tT*!dzVK{OnUM#Y7{Qld2Hm%aWg$nK+v`w9N!K``vAH>}e( zxyHGk3UwR$pe@bP=A<`)6P|uk@Shr_(&TL^ob2WV&L4plY=yxm2jj|ju(7eP8%7bA zmsoh6Pye_6Z|*2oX9Cv4dr(Gnh^0(ZmVSpTyM-@Axp%gKn$@x9T5H7J%Ps1xBEN?I z41DUBot{6=m4=DnL;_%Eedq3*sm3^36M^l|HQu}Tztup#{-Y%~k~dr;UwjWpq?%!M zqRwBxn51PeG!jr*w*H|mc~kuRsW031Mt^tr=%v4Fg~a|9JSDH6hl}4f4UDsHsD`B= zr|Hi7Y0q?K!`f=Fj~i8m7Z@|Yjqn-W08saT{x-DmC%Q7~c1wNGPdSYM4??UklNV6uvWDwZ_2zVvG#fX*LpaNZ`upkGBYp>V`GvMs?9g;oi-m? 
zfDivf(|F9@i-5R-jyjT?)RfugxipBFDSIqU@W7)4SWP?29K|VmQi41gHU!?rLW9IS z!zI)db?sH{qlyT94z-mX-T6``&P)ofo#{*U4)=)AAK#WKiNY2aM-AjA6(}e2b23A; zfuXG-{0J|+Ox}I@j1gJu6o~dB$AoXD<1G^$b)z3CYi<-{*7Fo(Ym=W~U>IOi%fdEM z9Emf8b80ZboV*i3eU5K|F-L@d`$)L_N=e=r_mH`eiBrW+mz2Ocqv$8(3pL_=1J+q$ zd*$|FB+1mXM)b>h$|wi`9Z!ci*BhS~RIW}n=qOZM&ieav8`Rc{dk;K#!#rtc=wQOz z`!)qPj0J&=CT|zMIHNCo?JLsF&#n766fz7yk_3E9{5mXgOn2_4|NGR(vR}1|;Ul2| z_%_^Y?dgcbpo>!f`W-vh>c3wlus-F|o)BeUPCtG2RMpMA1hwr+O-LD4s|qWSoBcXK z!oW^eeWOHH~-w`Qx z1Ziz3swn6b^gK4p5!grx@!YLQ?Rel6V$&Prr>;J>vc5W6 zY`vO@(%^yF4$8o>d=E1ETHV5)ws-utip z$X~f~lFB!hDOs-eWm}Ea^gF*^S+;%Fb!B=2ittIOl= zn=YUVLi-=VzW28REb!8FMhcVJY=Qk)6mW?qGF+XD*^Lzz?5I;7>y8`JY{!35-smKq;P=nFUw#Wi=IY3lP40FJj2pB|g=-(D*1)NICY%7{sFN4LYvAAJF|i zWiHjur6zu328Wf%!s=vaNZ$qxoZs2Re?U@8?ijsBd=vqTI1IjWjRI=g`CS z3^&TUeT!ZOH{3OAXEBSGlR0NerqQm)Helf&2rnA#S{2P1eu~n{{t1o`bxd}KQ; zCdASf44e<_1<|cq!Q&{rHq6RAfujTW{0Fv175avg_J?+G;hzmCD;!3T9|lUon{apF zd3YLvX9cZ@`@p#7)(D}G74PlErr-8?DHVo`C&$=A=_ePc+X6t%ogQYwm~X5y-I0cT zZYQ*Mxc$afN#aA&Rb~;|ZpYKe?6pK&u(t^pFZbKwJJ#msTp(fM&a2(}%d^B~TK63# z0irF;xd0q8>1&qV9>#tx%Ke~}@h9p|$eYJ!S?VUJUXOWT@=J+e*UxpEu~yicze$=i z*rEy|I*q=CNE)<*axRv%OxOV+tSRsh>Gj}0W}Uq`7TDbkzUK?f0eIo`+q+hbiUgUo zez1!wBA&)lZw{VC<-nNe>4Qv$x zcmvzZj0=!+iUY@`k})2NWU_PAUI>9ppcfJt*_=sf8ZU)()R~~*9h;7Nmlx93;pcD} zzzQ9FE3#q zc@$eYNi0*j)GCi<^KnqBBbHIbP%Ip9I9g%W-X@`v174KB;64+Wr5_=c_Z$#!sKj&0Hw)I=+Nsac1iP4!p*XN~-h0fr!VC^cqv%2HgCNezYD+14mvyf13R0 z+U-yIwGvH@ck&!w_Kec1S+{@<(R0e6(_;(iLfMao$jpV$vNN6hyZE4MsfZCiO-fJ%onAVRx|{<{|Eh=gFSA) zIV*TZ8P5C5&e6rgO4bVy#8Q7u;y>+*GARN4ZG zkx|O%%4RN2W6hH~p(=-<6LWIu0hP?w6Wo-<>yIxtZ{UJZJ%k8m!02VZam>%c-C=!2 zL>;eX=FS;A2fXogt2o2fkoPln#}=w_mvY}wLEFaNOIh|AKA8&d-p>@^B)o5q=AwH_ zQI(rtKcNG##q>UnDv=;E8FmN;_^Pmo`z|9WEZHtn(N!zwaR-or!y1~-i!(cW75c!H zaeixTq|j2P2dBq9y$eh<;MEcQi;2XgMw-S+EA0ALgOY*=`-8e4XYh@fq5bUNMpP!~ zi6`&6{X#IP${vJspHa+ghUPFe4oTI?K1Eu@;9TAsPF zoTow4sP*5F<>97~r;+N^f7i^`BhK=hU{@XzX9?Ug+*uaHmD%O?OiuHlc(8lpM;ry0 zVb`5VblNDbKt^r$J`(?qf7R_&zKCyIX1Q10F20WdrZ^{KdSpV>&3YrRc_+@j)>xxX z5GkUZiwfs}oPal zfUfR)5HN-yHD%U=JGmhuin@kGz9Q;eU4nuqivP~RIP;_)h2SFXge0+h)lS5R#_j|Y z*6C^FeU@gLgw@!?)xWB}H#YUhSA$T}w+f+}!zvgP!95^6yz#1h@L*Xs8Z8Yz&ORY8 ze9+F35;Nu5xxw9^;8d_VBA;h{D}m7YtX^M^kK}0@MpS@5pI{SQ z#{cSp+Um@KAi>Kf4$|p8;Q&@S#qWOV>f*nCkFn{JxPL$4hQ|>#LN%pyr*?;# z#;{GY!O!u7rhX^o#YEh1TJSBBH!RP(+-yePI3(+~k35_FeBGMns%{P}Kn;EtMR(OV zaXgEeW(-w2DcOM61aothbwh1tEn)Gq^_A9!G3h2xS9N(UAj+05Ce0QrGYnr~@+W`A zECuU<0zPgU_c@uBLxe`nJ1gZc zGxAJks}}e&^XSC($F*{jXq$5Yv#ut`Q)~WH?g-&c++is*-Jy$aqEA!$rEnBr^cF%n z48NaH`46?{)H#%nGt)}FKnEKvBq>Xkh7<+?@EY^&czGVS5Dp?R)ihK-IsD|KA6I5?IIo0en{EpzPV*EVKvBxz(STEFHjn? z5UNsBwXRPOj6_d*LsZqQG};)2gd*rj>pUir5;AY%c7XvlZ9u%e@GIQ&RmjC>UB1Sy{js`F8}4Vv#upUJ9Gp@Q!SA~MreAn; z)npZ+Qv(+{^$O(!{kmD*oJq7e53(M(yJPRQkM&oX*x$K)l|s%2=Y5dN48`X80i-sB zRNTjU%(Hh5Y^rY8@F+?$YAcp9S`PZl1{buW|9)cLdi`j!^jW6S)P9?V)dC5Z7@JY^ zYlE76l}T#-85j~6f74bu7sxdzh2q?)>kpU9R63_{fLllDi+*aoVc$2nr+;BPnEwCrrm`>jnjt`YHrnX&)J~GXg?zR`6Svnpga+6Y zbw0tzi#^&XNI!IUe*yo|;zFHr%y!eIbfU%Mr;xgPhqP@NLcr(DS(awS5m|Z%(3K%C zKJ?su@4fHdcp7>2m(4Z$Y;0ge@(N{inocUQi&G)n*FoLdU1I_|i#v^BDHB8R zW$5dCL8Th7ZomiPl<`1kxlpq=#e-yRfL0$r@~zuB?SF=T@6~q;KY-#Sr<`3R67^x( zvxPNC<~Xt73M$)wJ6NKVmy0ytN#$B$kVJ%Aw-87vYeI&E<7hgUSG$AapG7RQ=x-wP zoIBtoL8~g*3Y8aePT@wQD&8@2ApessFz)@?8NWbHTub%*owml&5CuNq9LIV+!{_>! 
zpfK<;F6V$W+Xq)C$1@~7lQ+mN%BTYhY$ddR&sL28Qyq*Z!i7q$RDz5)I1d#$R9F2R z-lHymyqiJ0yG0QAMidVt3M3|44IGGzNfE%#8M=4&L7R{Lg`@cGbIcnSrH26>`u_yJ z0D8ii#Y!_4eYD{FHmuF;7Ae`23rgH3oqt{?me%_`+X$-~R zpI;u})t^#bSAwyelGM#TZlaaa@$4I{WNdrN?*m^dM=W5~AQ~oG$$05w@5}QYXct!t zxDq))+$J-QRwDC3I^B73Ej$JJKzbgILcQvekD#Zu2!P1ue$dR@U7oG?CU$d-h_q@Z z|C=;K*Owv1>s+wV-z(=@$6us%SQ$8t+j*zZv)Bmcr1Vhe-3bcWOnCQ+=zq3u@2rG7 zoI`r}(jn`Ui>9HLGwZ6F46YqGwkqy0z_Q*Kp7|AM6XvAcMBBuf)wg2}4yjM8$ZhPd z`)ynU9SE8aVu9*6z_eRR`EzpLlV{JE8n0g|RjB5<*@&1;jBv(>S-#30!(& zM8$olFwO49{?&8CYgS+%MihD|Bp#^<#Q8vXB*%j7(W`cq&okqSF5(-J!C)Zfd@&`8 zT-G5bY5CGB5^8V) z(&t;3j#SY?;dHZELyd++a36~Jng)i;jd&SN>|Tz+tUdj#bEeVGKqDgA?&>z4+mg)1 z`GjHe?^>k5ug5t3HB0PQT%kv%I6{95D^hb;Z$e6DGxz;Bt(FA*-7PEmb}&*;UX}9mi*ozL#8>O~?CuV>rO2~OP#Q!~ z?HIe8a98+}Q}H@C72Q&UwdJ=qAhyAT-R9x!qsfP>_G&{e`(VYlP6%R5kZuJgi$ijLNdNC`WdPyeR&l$RPl>K^X$^M1nOR^ z`JvKBWU)uFD;-WvAF}~^vQ^-#!&w@f1TFJAbFRh<67Abt!Z_Z^|BK-z!%Q9)avg~C zOBMF_cNCdVOx<~bj`;98uZ(3j;mH`pH5ya&N%3Mimuoo**mbXbR%7{b(N;HfVmto( zk2<thwPB2C?p`RM<>sF1Y;7zxBJpDsCU#uU#)A9`QAf za)}|$u$?{Agq!W268`(_S-GH^+MfR3UG*Ua3|vp@zHBeA`xPrq7^tQm*__O!M5T7V zjG|H=S9H(~gplP>#!x;lhl-6_CetZw>Cc+R3u3=}o%F@_+J0FewupfXQ!1d%9UE(k zT`>Hp}|Q_&?u(_0w*hPQ8CpxN{h`Xs)Elc zBu!kP{U_~JfZ5m3s4XF1M6xdf-)Frt{hC7?4=%Cz_*0r6BjsyKDV;^+@Zta@MGr5% zAyjoHGwk@?FtvzhP{TxmLWlNRyp~E>ujZ$*%$+v4z1hn^3+*uj$xN5mX$>pBW^=L< z%%brQ#KP*0Rj<6DYNkPAZ>OT7I(K6-hDU9kzt{e`f(h5iBlkPs#DVX5R5% zNyE!Y?E>y{c7w4017Xj!WSd{t5aD)}&;3zD`}4+CY3{{0dD{mRdYSx+_>3*)!vB|b z>I4^TFphmzGye>3B7bQXo?|HhR-K?kEL|W zd0Q6sR|ELv@J%?b&9F%ybl#rtzqe2l(a!YTmQ5owG`Cjo*rb@Fust0&NZRc1qd;Hxy56PT3PBxtz z{1;eSU}pwe%($AliKLc{!RiI!o)kk2``~Ul5bLKD8sxjPW2vuyKBiv(8E(|ud@A4x zXx4<<0yw&lU3ShZ9Mv?M1q3}#v$aoOa4qg7$Sj_kyX?Ru5N0#;fUiJ9XQd7@3dCoG z+HW}=7XH}gzA6^w(Ct##`UW(wc9&j5T{~{h`13A*nWyS*`o^vnNdH_3|I}JNi}8RG zJARy#IYcG7ZS0qTo&jkV&$jdl{Q;>=NsV06&%{HB75p`J8<7BgY}s5Qi9;o>KR}>3r2P6X}B^Kp14;)iex%4)w3>>4a6cK&cr!_z^yo4o1xEK+~tJ{%V$l0{GcGy z3$_PCrVnk)L#MVMM@|AH9Xv>>M%oo_?w4^9N*^DFh!Nuu{&KEqz0Q^FW1KWfs-49E zNCBp>YpEeXHjS$Ecv8?f^chc~KmT4nvx$rfa*8h8q#xRw2mc-c$(md4UOKy^nGoYf z;0+PB@qj{UBlm1C#8xt}y3Y~B+O=)2anelk%MZw>r0X8u-Aq=*GtrBN!}(Cjp9XL# zQ)-eUWoTYiD$wf%nhcoSc_Xl71Kv_(Xx!lF4WZMJt3qfLR`vI379&ZcVZL@QXo&Wc zb0!tJUg;;|Hr8$tbCL$>Bu60E#j?^A`U&{;r>l4d?$(4d9EagJ5)LQ5Kw_dT5C{F& z3WRP#8)<$G>=|Y&3i>`UlRy93aoerpDAm4g1lb3sZ*x;spp)!dwq@0>iprWIV3cz$ z3%*h zJe)k!Y;LQE)g*_2Oi+ybSfI|Fq}CIb^*;ubEy8KGK&kE&4S3qb)!$i(I#N;ky*iG%V^VuCsto;p+E zHh2|w(-ZOVL#_;cH7Lo&88`Vc0)CNbQ5{z~QryuaZx0Kabu0935kU zBt1S8kWcmhId0~bun-tjkWti(XYX8rvw8mJ(D!2-GYl!Y(j^Wld|lbhD=Y-PT8>2p z8%S4o)inYxHyq_INsPAQbzzP-b3W-xf*cb)ymWQ6odWp(`DMt9uDz0*R2zUy^@XKj z;bL5*F?{#^^GT5Ef^j9XYSNU&k5^VKHkp)ysx*e~?7}h(hX%Eng)FKR%mp43MAMeX zw~@reVuJ19L)F$A8euT9ha0XQt}Ezw7n{+4vedkrx7m%YqA(2fh3W|8pF8(tmF<`W zvGM}uiE~?Pgi$Z)S=rFJ((4i@7~_=<=;e2qmvGa<1^{S=rOPQQ&-7SPx5O$SNjk

    vP2!7pV*(#V`uv!~TKOi>X2iH~7cx^habp zy?8%@pQkxeqvqy3{o5wg&|}CB_=tVrs zMfiP}Z+h3ujacf=d~r=>Q-}*0hjjc>)|5TZza$-wUyymx{9I3VvB&_}VQC)VtrW~W zPAZz`yHs-8vPM2?U;Oz)35LH&8%hmEmkRrP($8n)i2@{Jvc4NXI=16eMx5k$A(Qk* zN>;pj18R_vG7LC$R}(N{Y2OYE~r^*$Df@v)U#tXH`F-)qTyqt7Pxg}PGOq) z(B{CNK)UagYbpHnT3zDsX@^n7u$W+y%?)eK^pPBw@4!1mHbTSup7QdCeX+hQ@9-LN zNGeY5%?)k=5_<)|nh&>!%SqDabdA&?tvMBsiW7!OO&E{ebg75vJ;n!2IS$a1VG6>e&9Xw#E{=yO{wuX(^>kTW5n`d z{NH6KD4_46F1iox!FMOIFy?fKMeAXX1%Ef*AQ+sxcUv3IaGhd&2K}gcA9`uC;ChZY z9Syqp?jLbj70Z8+OwOCTTB?;sCqqNseTD4*_tFS%ZZ>5i3Quff|7T+(y^l;JBWIUITLHg zR2SRwt4qnK8bHDXP6B7BYMI()5L#MCH*xD)p#_xDIOk&8&NO~U?c~{+=WSVNG0kwG zSoW~RWRs5B6XWMMb`mX>O2HPB@qnXWM{q-8hUPU)j z4XQ%vTlFlEo)1p?NQV0qPbLR}Za`8hO-cBw+=j^Tecj(YAe41yXb{-QN=6<1olfD9 z*T$_enASVwj2c$gY ztm+aoVmz#KUJl$uxbB?P8&taEc|2pU4smUWGZ{4299hzO-A_PoCJJ@}N7Ci7;Q1i| zD>e)~CS>Z-Da-OD%d>Zh+(Te^5r|sk`bf0C-+l5fuAtv7f<(8WPSDZ*4!c%LUQ)p* zv4QQ69*zvx8??a$x3D^|c8_L2t@MhXc#3Gv93QbHRdNj)r>;X~;>w zAdiGX(pu^aki*vXFJv@FD%yOsPW`5bhOmELJ9b(i8-^+HNJ&Nstw;QN8z7S)(s?KA zKd2OhSBJ&gJLQC=g3!n$&8$5uBvwQY4zK|{a7qr2pDK94QZ;TuPM`1TAqAG%Fj}5u z&Uipd{9>BR;UX`n{uGekcn_(kxoi|}oVhRPVw=5;yW&!JV3LVu(cywxvziQL98N9Fa%rK8pYR;|gASEv(uk1aOtdvO zenCDLkvmCCz!@CWCB*hsp+1LMUCrm=vRmChcufT+>Acr|&i;vB*#cuJ`PcE&c& z2kwwpS25YF6l$+##aT=;`Jigkw*v_N5U8!kZw85TUzFjZBL_l8+O}>I) zT5kuRAyKoH7ym(Eir5p(@DfH<-LQF$*M7^!BFBeS8wM-qLVO60O6~_};oOUJ%Hp_h zm1Nd1JUx72T<(Ibfs1>Z?;+O9j@OfS8|2*{*~X+?w%OHc)>}EAzs_dB57u)@CX63K zGKCr1Uxybt1A`hID*(BjsyDngxotHFAA&~ZzwEpxU<~C?xzzIso;zpA+#?1wf6I2} z@d9)yiI^(%;Q3*w`7Y%p4zJ;*zG9RgAb>}avtF@hz_fe|mdCJOo09^) z1pbjyCgt`O*h2KQm412Rt1Ur!3UY>Rb0#B7$itnhn%7iCEcgfTT7=>(rUa#bemb`Ri?dP^22~m1t1PseQooeVF&t z5`lIlVVz*U{wYmySjZ#ejs@ZD$u8y1BE1Lwp) zvxxsubRPax|9=$!To>2g>f)OB%1kzwOBdO)XH-^J5|^x07nxnx7P@q0&yb8Xh+HeA zL3Uju3T3AC`@O&a!Mz^t`~7;K*E!E4^mQO8V2V5U5FF4$Fp!%S;d^P_41HBhH~G5xe{G`tkopE13}^$sS&pF1wf z8uYeB(tExeNdwl+l$r48*IA*VRT!s+?cO5_evyjfV7)ZADSHV>wRZdx0hyu>G^qC*sMiCugJaC;9u{GVqhHKYUcpe zn}u$9qkCvBd>r%6OK|zqzinVo&(3B1^xPw?_;Ou7JXD`r424X8%#VW9cEwjL>>Tr9 zVdVKQ(Bk7zx6LeWqA;^e8uLbAY$v@AXa#gl;ojJ1HS`iDw zbAP&!V3sk=xQz}(U3+aIFEmYhYohxCbV}F4i45yvZD?i%r z3MUsyEaRWP9V$z{RU*u-cQd_IAzUmfDGhP@9Q4RflDr-!;TNJ^H&upyl5N%WbcYFY z%yAV%6F25|-ECnSNlz0o17_kt!_UCjnO_zArXJ$@_pJ0!)~GI)k%tC=uPWqcU&oB3 zmrhro9LCoPnO_VE!*l=kwYq6MbHkPQY`XI_gKAuLjx`e`_4u8OWI8K)dolfmtk2^I z@NEV>kku*=EXsK6ld!%2@>(}Ft2BpWNUZ2aNX1Zkz_NX58s+qH*`BdzO&G%*PnK3>Vu?jyaxNr6O0$mc&;hpr8~I4 z*Ua#*BG{~amr-;8$C87`RdRjm!LB>=ol<4K20?vMH)R=GBz6Skx3(RKw?ibVbi|6N z@VLwKg*SBG-Z`f2I)p9-O_ix+JVzOKRgi90r{UCYnW<|AZ?$t=AEeE_@3pGfb7KR>OR_>s|R)3}^m*scG5yQ8L;ia6Gl zU35%!gp@2AWR{4LKr57GuA1xvHAjG}9+~Dd4AzKZpwH|{y3*3el1bBl4W$502W1Uj zIPP*aFVqI)(;8YVdF@K-urh$#8qj2Q(;fq!(IyL3kDFkQ>1Qy+F7Z!zUJDrM?%o1- zrIVsicKEJYMZq4p-*BLmN#Hv35&>{98e08ve};PjXw~v{Sm>AID($xP4wP^+`6RE& z1eY>3@&r-th%lQgKxFdDilzK^B@w16EBLNC~$V1_y-9M~}gWsC&(^A+Ow%{O=6A7&;j^ui?bP@f@0saElFtB--5J zenq-Q0ZXVketB&!EFE%tpcTcE{@Y$~NP8<#sqqtUjaF(!gbaAku?ISTkKvRr`uscR zl&M(CLjH6^0>)&XvJAHFmJIyW<2kN_zhgdE@;34E@Mad)0XkO;_DKZJGx^u&5{!GX zHVsy#2=v8sXMOXS9QCm%WroI!9{nA*vlgbKss{U;c!7|{VjSEz zU@x%EFlZ-bFRn>oy?VsnU-+tG%9kWw^OJXcp zy5vFEUc~d^JF;JLm$nX=&dcC#1b%;N-4A?pDlbaeed}$KPmmb6c!!o zlso|v4qq75J5<$YUDKGY&$_hdnJuc_m^9&pG<_9xo9%Lm(Pg(lZ1WSoXS|Ebe2mwt zx-)@E=*aO{@b9-t^KAwfBOW1^1q7fJr%3w=LxzVx%*Rf%H4T^-TB4bQzeB)Z_pPl@ z?oli~u}6bnBsIj@R^;;cFE-*ykWFlr>xWe-0=EQ6L6_fnz|l1{z;&wxax&-f(0E$7 z3$atgZpb@cJay=wEh+h|6A(y=H2&;SQ&{0OL6n_pL(qNI#Deh;L`+d%nJlxGkJ!7f zAGjZfa>H|Ofn)Lfd!G&lsqjvJnU2^q6w9v2h%p!#eDr&m3p~||_&GY)?W|^nH9{(U zNI&oh`8BkHj10<7eg$Hqac15Hm(dY%)3%x%#7BDA9pVD!5>{yB`Ehwx6eYb_Frsi| 
zV1f$*S*EVP6JGh_FBiX#+4;TY#KbyUR33C|7pDt0GpsGhWlo* zm8vb=Q?lGrV!su&Ktz+ctYI>uwSxjVMxM3cUCQWbn>`l0^hQYEn^+F{g3c|~9-Rc6 zCg`xn{3yTGnm(NPYXTx0L5mSTwm9YPu3W?V48qPr22zha*W`5=rZs|2&ClkDU;yZj zYfhdF>eWOdC+b*)1_EH1g(hNlcePXzTEzoistZcyrbr0y+ne9iCNpN0Rn0FHszY6Y z%A&6EW1q;}bnBajgPooxlhxn>yGXxMoyi|4`wt&!wg0vC5ntCdzx_`o*fBUw3Hf70tw|!$eGn zO-?82WKSH_vOW2R;Zqv|P88@4EYv+`{`fO9a<4Ui#}8YEyBMD10XQNeC;%O$6mvSF z5kkv|Z~6;0YELLC512EO&8Qsr?u3Wz@Kwvtv(g1aUOFcwW&uU)z28YG)r@MW1@G>#?%rBfu9YJhsqxZRn4&O5`%>=XgX>) zy~x@n8mmVLfT@;oDs5=q`(%Z!& zbu%WUP~?X*_Eo1p)ClRd0yrQ2I^o5$EI#ce`#YD-3%}vw2f@T=gwcgxfwg=!w0dRN z{%MMIQyo@TgT2h|Y)7Gltrs`F)LzVLnjQmwB;5D)1Yc`zD@R73RZPBK0PIbFU zEY7bX=)Cz;GPtBnQC{q&;AJOpJ>k8Xa-;tEU9dFR}xbQ}eo&kvZ=;>SZ&zS>$jJ@*~I z&_!jV?g;frh#A!0oV44iDHLS|F?sZH4psM@SjXUhL8YVEp?{80R(|AM%Of4Kfwr9m zvC}IzcThA5Qs(Spdf8#k9PZMPS9wpCb0Mcbx1n`(1uwND*ZZ^4 zZ~?H%p36IGuZ>`l#=9}8MW^X|dY}@4>(zaT@s28kZAR~-Uyh?KgSdYN17-t2WEAWU zmnu+{`T|vCu+QaXp1vMwQymM5{&~!pIP@vg!ZCNGi_bV`1AOUPfc}wu5Kgjo*lO%0x~R1MW?hi@}J`))jV#gOD62x7Eq>E5R8HVVfwW;l^vCvJ zd;tus9j3RVj*#@L78#P{%dNLkka#^10H{`YY-fu;B|{3Ixi9b{fL*3!#rjT)y*5w> z3S^|l^eT7&z^YPxF3+6n&6E(8vHiZ*!4BsOO(&wLp8Qv3_te5t{_6l@A=$#$Htf;# zlAZg)V{1btrHT@dt0&3VVW4w*p0l9=DflVxklK)ewJm!P1QjS_PO*DNC^%Tfbw>tj zi=@2s$}0Qf4IU{_F6+VBq{XA=)1jiHV;{3a(28T<}7^vQV`lzx^uf zR|AVn-T+?1X86*5s?xFZR$JkNj3QHxWiZY*UeiVW<` z+*7Y5Bx=P0(RX_g&!rYWKu}qCR=O4A!_mCQb{qhPX?%aTnoLjRqcbl*)DBBVkaqE` z3c+bTq~kZ0-J7d0yH}l(Cn1)j?$5*%dGp$iNXIelO*@>GZw!)HAO9L+@!@CAa8K+M zed<&s{jwotSic)^N83gJ1sfQIXdem+zbs$&?A$Q54uB!Q^~YzycGlD@o%)iNmX=R} zk7X#~A1~w~b9Xoy^-n{S*Z;TG^%t?4`J#aP#v1ar{^&B8gJ`kjDOF>=r-_$I&En^v z%*(HQA!LlI*+I?6JK~*ObFjo72BOmXWJvs!_EF}~cfl?5=Bryr{7h*m`k!>DR?;;)VBtNJmTF1llffZ+h(t~ z0mwfaW>u#1&E#ls29dN1EBs%G)&jr?P2z?ul>I3~nV`o~Ne)J6BXRySL=oLgOum!f zm_Ub@t+LXh9M)Duh`v}1c~=*TpqaXti}9Pg!GLpwvV|~NCT`1CK4o8mG^c4Qtqja& zDWo%V|!p3 z`wpgpxTkomF0?XTRzSoV~QVV0$wi zBHKLgV{DJ{4*CY?$)>?`fFmD?^k%&p-dh`a0C@t!!cV`GiQpcxK=Wy5$Fn?xf$>D; zKzT@12X48t2{MOR`#cr{~~+9qR)QAPJL;RYLXr^ey*Wu zQuBKS_p&V9dd4XSGc-0C`^)iM&USRBBnuYol+Q~pbzpeyUm>wDBa(wFxtSb z0$|Tew`+P)HzQz|-kfb7f=B8^X`JU@`*h<73*x!sir>w@so9_D9+cJ(SxkZ~!_`9# zg`KZZePQo2d{-}b_3tL||8xNw)3Ww#Glb|kXFs}rm?HYD2IhmI3hBB`HiU$QycW66 z1D*|Ri%(CzgTECG>dEs*_*?Oq5M-g3vK;lD*#l-h0z)#2>kQX?hTtI3beF3a&Lcnw zEGu^mRqLRh1jo68?9PGKYjE;7N8DxJkK`X*#~;%eiN~}I+)E?f6sqGzjt>tFi6Z^) zpM$!1Qt1iWxOYEN9q!BiZvTX5B)0P1cEsd)Jr=wJ+%lXZ&lW#M{x(2f9d4b`Jel3Q zU8)M~$gEyFoAQB`B}N?omHi;@6iLH`#@>Ii37YI4eRAVzZQ>sp+yrynmzB7uT7_6f zF$H(SlLzMLn@B*Rxw+gfAw09MW98l(K60m^q@V7u+e zs^XZR=oi?DZhL*-ZN7$^q@u82TYG{di(&eiWuIqm$Y5jhk|!% z!(BKoZtz}ykyYU-36-waT%~iqf*8997a1cUN2`X&_5mjugaGs^m9#>*F)pxgC(Q7A zckT2^NRRS>>4Mg;H)u5(doy6dZa+%RH)eLmwcunM-TgA`9nq{4;Btm+3&jjB-YujcD~%C)ozI<^Pj9RBhLm75O5<|V08VWb~!jodR_tGD5QP9_2u z&gSL}>zUhEpwggdpZQ(%rDtk*9?O&C6kbdDt*>tdEM;SaLR}gD!^w#D_+eaU^}a{f zq^0E@P2LS&3jj$3<@T}ufiYI3JVIa*qwddTd!VF(XJ5%p#KaepEyz_5T#$nhlWrKL z81k)Yo^my|k7bSS+6d=}Y47Q_9f5~#$%*UriB?Z3YMALRP?V`BoH|BS4siJ&Fb7n0l=^$!iEpf<(!!{T?95llv6x)boNi~hrD z1?)L`r-dI1m8f}>uW|(KAR^bz@OM#g`M!pikKt3D1pu?YOVnY z^On5oCx_8b=|8~K$n9n({tQnog>${gaP0^U-lC#CGveWTH4Mn}?FWLK`0Fn8Ws55QXOf=tJ<+0^b$eXlzdbSy2QKU_`lC)x_NrG%ws(ss?R(KP-CPC)wYQT*z5#vT z4hpF?p(3n}UU{q@J)&x@^&PtQuMNgto&B6=*44L+=3wSlz~m2(p0J)xJUg?Ozx?aF zr(>RE1{p@8d`bV&PNav{LTpdT;ELVVL>@KxwkiYfddU?hG%itcO-YxnsfN8SuBSEY zH8`5yM`p*hq`ur=D-MTpFa&6XGO+d-(iqGe&de5z7XMK3Zqd_RAk>3D)58bUO6WG! 
z434Inj%s5oE_rP6ep^ZwdIgE$FL7nG<8`!d4b(q%Cu@me8BxN}L-d0uUAG5E4h?Eo znLgWZIGO#|I4RxzsXU)}Nq=}o6EZ=ondBtEm^Wr%uv{PF~g3Q9X z_`QNZjU_lAh@v2~l}5(O_>V*|9@box9@HgpW3`!y0t!)=wAaW*6{-pmYpVmt!rfXF@+VUjjV@gV_YueuEGDB zD5nqhdf^WrvzZr$pveR-S=dUr(2_4Di`&A#pn`+rH-L!K#Wx0pmRIjaBhU&z4}ihG z3da+rvQCM$ucz?!FNbC0*{X0_uW25__9ID9;yU2kGOHlUmBQwTa%yk-67-*f50fjj zz$#YsN=xWva?gxu%~$~JCkOQ*yte0+gp5dELSGq>vD_J37{!CVDNj*bLZVN?LB%x{ z(W%tHP?zvDXTHY7zV#TLEDQ5qQH~iJ7epsmBnxSe01=bQbhdL)Xpf-;y{QWuM*%}< z3ej9qi!EeLRoQQzY+`*DviJsxGRI)|s`@b@;k0TifFh09dB2R?6cV(@n&nEXx&no# z>{uMhr`l?-Zoo)!B`n`79(3g~SEyVe)gTpP>d2qn-xQp0VF;U&5r7%Lukd{3WT~n7 zSTXyXajFU}3>5Hc)OhAV2s9w8;M{b}83)Zs}9+Eg;@>(VKjsR*6T%1|)V@t|0 zgP$ur)VR#&Ld`*EOuHmHi3MhIj(TJG4lNuQ&(k=nuX2*O9*527nx#qj#!-E00obKL zKVsF5fL}WG*sY&r@@^?oFA2hJi(l7`Mvhyco&8_tkAXE)4^KCq(AlfM@BKC|{cA^v zDR}bMY)Z_Cr{I>zFEUsf6|UF|{s8XEA-XF%s|8$kDj(|CfBLH~wH$(L$84icfU`gA z+=JpJmW0etSjBfRi_2*uAtIO?3t{I7jE@3P!JE#z!yPB*NU&{J`+U9{XGly12rq^;QX;)FC{Mpy zJy1)k4JKM*^Tv=Fu=z8R z#PXJpT=Z0cwUy91_`7iH%zAvTU5qO4_%Qf~SkCy~ovIi<9x8WV|E3!|?2rhCB>BPN z?z`+$3nZ3l)!G9gbx4S7UrJTvA)qMm&Df@IwrFoy1EWU}&UwsPywvEAmHe5`3@1{T z;>{mm>0P#*8SE>FUOA{ry42%c9tyNqMTvGtDchfRQ<3rwt8Yw}`Kq{uaBw__$*BL~ zGgVXd?_*xQ!9Vc){nN0PzV9}4n*wK`$SAYS*i=Ghr5A^P!V$#hL0tM%{(&f%|330* zfNIQMl}WF_iBCB?*%US?e9#`(TjaQp2@QJ5jWt|i-lsAVv+CRvb5v|HL|%vXT#@Ug zGPk9cUq$Mvff_)`p6nfj^*gBd#Oh$5XIO5!_w%*I$9a28;J4~$!Y6wwi&mz$(dIJ} zCn%Dv0Cmv)QN_tl$=)fk4N-WbJ=8E?*cKtfHcxe{p z!r4+!1lcLRBwMByS>u^HErQxq$GLdT1SL!EJXG_)QwI2BC+EzSHWzPk7?x_?vgn9D z9zzH{^kLXfU6=g?3itkZ^RA-dP(ofB0h<93-s?=0!pf?tW2b>_gphSdIw-BMg}> zj>>Vo1~m2@AE5(vb0Je-nZOY9Jz((6!a^3da^reuwMG$Q7mb(vpFtW!r~WlFBM=|% z3%9;{@q5l6kTOSI82eUqBXGRy);ZDdF=wRj0`Ly5Kb^sLWs!PVS02PL+DLnWr6NoT zPGMav-&oeqIw!@uIL7VRvOyV1&nLZI?x44_*wPckWv%c7e|FgRe*wFQ-gt4UA5_N| z$|a{r4#BV;zuW}MD;aIagxXaZrBgk~RHp=VbajlDOu^7%Bc%SPFXAd?&t6q_*Truo z)DZ;&zp#YR7f+^{3D~iXQf@6y)SZ@A9WGADSn~@;+Lc6B%l#EmWsTW+pdVLbrWh#< z)$Gbci)zoc6??o(m}Z6^FYoL;WvDKO9tm{6Cj|oo9s1YXWj3QvU9`t#_WiGusAx*Pm0hcbSNBT^CASyQR@}ioILJjsSZR0asJ}LCJk3j1pDR zb#YIQ<(t7*avn^-bU2t2pK*_@vgzckp-I4g?O`UOU3QkT3khp%Om=M$Or$wZbZ}Hh z4D_U^Z6~hUi^QPzUY#DBNq3aHgySHD;hFk}4zmO?aUZ|hrGUsy@{-V2TxhUD!0e+i{b1nciHxId!RXXZ8FhZFDYkjCdkReE6cuxKXa{ z{hz<>eKKKW7OXZ<6o&0h7y1H4Y0OzV%?e!>iD0b@XMW8ebD38!oRB zC8Ms9Rp)ItJ#WZ$-ygkhy7)MGIi&N~1pIS}&q;og;1bXExBaO*OcyZ0VROPz^7=Uq zNa@4ZHn7b~ck`v_i1C_iQYYYayNqF04#D2>Pxi0lgGGqSx7eU@%z#raW(~I|Ra)4q zlHE+=1B($?+ozK`I^4OU8=B5JHU+j7p)R1-K8Y)4MoSj0-}X(`79OheYZuR}Y&3M! zh3x!bZmxZ&hl?55l|xwKxqtQLj$e&6th>$lKghBAg*`B2o{CuZ5n4Cw4%JL-^Q4r@ z!XC=2mAdIY>iy;LXG+A$;d8Hr&oPp0q|Gs{rUyd`p5mibR&jO02C} z#R7;AO9{8f0Sy&@)>i2K9iQjiKWv#khMHOB&-RQ9ohVCVG$=M_m8mwub9mo$q}d%l zlY2v~i4x-m*HA;13k<(7y=}KO>A}z;H?m53q73;MxhVNEF%BDBiH|h;b*w;o2AjO zAgwj8gdVQHKP$Ce*2kN-@hmKCTYsNqS~&5NTF@tf>jGvosI2cZD3!64pz|o41|&^u zH&>L8gzsby!EJ{pz5vG;0PD}uzp55MuabLHS{(_80mIl;>2@Y<(H|cwG41X@AAz?? z9VmF_9ojIjD(Yg^UlRhg2R@O5V#Kv{ybd4N>F&8=s6}>OxD&p9Hbv50{wakbX7d;_ zS7?j$=^pG${wDw6Rp-?xMHq1C=l=^Z8)NBREp35EG^p+DPJv$JTZD<(JcHd%+GO3! 
zDiLRbvU_~#9!%_WBZUc_S$OTRLf$8H)${!LRSW15uSoeFxYzRM@=nryNf^q%ln83% zBY&Yd_BEt9Nl4x#bPbg;PWI*ASh&61vNF7k_CM$XQA%!gP>4e03d?+1afV45I}0E# z-)|?J$T1W&#Q^JD{qNbKWA<#xgLWTX00FkJrOiJIDX5@}k}XO@CjqX#TDWGkLgKjk z%Gmeg@>(~RKs!g;34yW!73%Wr2uOX#+7W0YQv-zNjhWbJwqYy}ZOtm!1 z(lMN9^~9uk@Wi?Lf@yJuy;qP6Thx*^tyb@>@+?)CM9h5$7LrPZSX;Ao(zV769%RJt z+EfJ^;{>x+$nB6dkfNreKIB(;y)SYks$^4kH}TvY#N3SPxx-4neT24NVkDK(bt+IY zig$bRL7MgEOI0K<2rhH3=ZQ)vsj~DoD8ET8DTF_d1|?g=b<~H0+z%bBV!Q_zcYS|N zFmei9x~Gs^(o;M!S#-|a=X@W5e+q<0%OhCxT14=_`w#1dJxEGf3dzn(KN4ga{bayGXmvTW5D7ezkT^&<)u%$du>Dq{3P z3EBXKz|$JB?4M<=W-xgssM^nL8i#kP-6eLtnZ0ZCo;o@SbavEOGxQk#;BEB&Do(7RnT;>NNpYj5M zFM53Kt8?7SaRqS{sFU!v$Tx*@Vy`hgD2TYb?DIJO$bbq<@CgvP*jd>jX&{f#pO=LS z2YXt^QT{@eNzyxU18WHKJD7yzJ2A%F{>%^Z%rlF{^lX3;!Mnx_{}DW43!m?YN>~yE_$t1Gn>xJgF(9{I%CJS^(0)kufMH?Q@nn z(L*M2J0esnOR=1cQSt&_GRZX%H=;g%{wtm>y=pXMd<2aZC(CP#mW%KQy;eHk8pA*o z6?|gz`&DpVX3==KS*cm~$zu>rp%lDWPS~!*BWGd`D<2n_jky{5h5L2!Xe&+ErV2dDAF7j-xhCePi-U{`NkuTnHFX%gAOwv%^c!Dy zaJwu2;ZpimU#9IlXj@t1)5&MKTdiwI!5f9@>FFcL)7Na`04`C9Fq@aa?J3`IIZVh- z{%QR?IaDL0HJBeRMeCkeL;iY;PCYJV)bWCY6H*N*a~kZ@#)JksEy=?9!Y1~puu68- zhI_tg$=oA{H`Wiv4i0Oq+4!6D55EM!E@f4&_q!)VgydiHc+L2u)l-=zLtX4*N7@RuyuaKzfe{MVHB zN+J`%{pxtP{tjY(LT3#CBHZnkKYG-)cydjcZ`#(w0$R40TYx=;N#SYdFsZ@6Dp0UL zJDG)DNg1(6Wo1Z7)r3Pf7uv*6PHNiqegTZ3j1mmx<+sidWtVEY9l$nS;eRMD>kM$| zkSp5kjnI2X6&yVK=w0s9>*hvTF&J@Bh-y7%x`)nTS6Jbsd}s*ArT@Hu|I;Cm2c^lKy$C?EH9b!p~;?l0A0sHKft2ih^Ym15C() znnlpH5Jzxy3`ple*fB2o8WKWRvqdlLKN_iGFebZ8Zg{^U=X7=?<@&=|=<19hL-S~2 zIa5g9bkmz1-c*k8cgtYnI8r%E+=i1V_E$~NE8Y%7qgxbE*JR5_mx+oLINQnh#Y7ok zktO9@LbA4^zATgu6#h77%!0>wh8-qB`l>F&SLPj?xDCuB!iAN>a|fk}62|t(uz^t6 zIJTnKk%@JN3s~3nvegzXS|C+DsXi-GrMzXcsG;BBR2eLgiav$Gp=Vs#>jI^hfS1&*_b$Ud99+ zTet{ea}GbWnk^paMJly6JD|dRZ7VbF2{EfhiVa8dEVeNc#gGH~$URC^g~V-Rk#5Sk zGM7kN0{GdNO?2Wviuk*C6DFfQOSdo3ilGQv3|-VUWNiftCQqBKw-nA>6s=K1S%mcPSZ0lT0qG^b_-xCWSSF$V~mTLyUSnQ)rZUcr=RnCSZ53}PZ zz`+R|M+A70U_8O^rZO>IK!h)2R~-$?!nWk6-jok6WB?U^X4S zl@bUFlo-!=`jSY$K4;URtBM4j%$c`tBuc?LmN$|fh>4#&thH*u zLx0V`1wfE5MZ9q5`s9jf|I6OY zs=?G4xGRJ0*_aeZ)K?fKVJrEM^! 
zwMe#di9I^9x=134Jc-HLSD2oFw|$)7zJont;Lo&{e{hDN)ja!d7ho?WUW&dle0VOx zG?*4HXtek|fWb(SaCO>dWU&#W$WK@fiGzUI0&OzrpBuRomJloKJU5J&w#HH!JqqZs zO!b}CemY812gDaoEYuLdgDMpJnxpIdr!-NEcswtkIY&hS3TC}4Dx%wJ8U#6Ksquvs ztjAC02>1p6E!QIgnEdycaS1N&y{bBGj0rv@0Om8?#n(Yk4+~uj`SuIgFzdP)87dB| z7?f3}WXh<_%^41GN@z??R1H!avj;Sndeak{bz6ErEM+g{XS?Rp0bd`_i!1e8xfDP% zoX1xy>U*!MO4}-jeZ_O7NNnzXGj=r`y>~jL6Kp!}NY#3?it?f+zP?wBRKwq@!?gW8$Hk6W#Vv{L+hQJ` zSaZHg{Y*q{Y*<23PN^Z!9Xe4pG z3!*}Adp%7t*2tecCt9m6%%F(+C%5zkfb8|QC8ye8V3?O*HF3;O;*LENg zQ4#valcr>_2c*<{_dK(j!pVi|Lm0E%;2WkwfCw9kUz4=CcR;3T2R%|eXO$Z!`;y+gPvz)l52PH`wsoz2by;!4RR%H-vD2G?gne;kcGWi-JD}_M z=7prb$2E35pWv=*Gdz~Wb3^Wxy_KdWYKaLXH`~qmpQHG&68nF?TqZ)hg2~ZhdoaDH z?3nA#m53kyC(;%)kR(ruAu3CGq#12wnDXR*P)rajqs`s()8ioAGj}|4vk$#))2j60 zDH&*_x_nZy&rrL?uJqKoq0r?kXJ*V!UkA(a(DM!6*=R|(R7V7op^$&rezXtd@Swaj zx3F#*TtscTzPBHG_tb*e01?^Zm0cm9UY#+2){ImXJ&_OE9SfS+@-fW=F!k>iqYv+Q zD$6oRUsRcEku*VAJVYP)Le<00L&4Xm2(w`xog8O}ESA6W$7NB=d?ORrCmjCJc`_uPrt7P2+Qok?wX8|0${@wcph8>+-~v^ z@B&Nbs~Ya3Wzt*xa6JOqrKUD^c;3V?j#ZbeV0L2Xvg2VehsBx<5h%Kbwv7^@I{Ea;A|P_hxR94Eh6XJxVVCbsh5=W&v#;Y$1R}hIM?&XnI`zS#j3J>vwb68R6iKUOpLV~0^<&-48*@qZkwTd7D{MU+Vq6?*yzo%!!tW)ldU?p>(woW|B%bcPyL7zB?a?Vo-6l%ntH%2*#$FP%N%7$KVE~ca%PPK zic6^QxgQkVr$V`e-~5k~`!eLIwyp*JMUbSixBi!`8!NbCsH0;fLreeFg|N(X^|%iS zw*5Ae4ij>0z?N>^`lLg+(5?d(vpSs)_TR*v| z6T5B4^gzQF6Q-dvzrFOHS9eo?+>vK+cH-{9 z1lW~{a!iLNk%wS*xeMj_!UA(}hG>acP*pW4-j*w=J-bu0uOX}^o24K)I|$^&Odt3& zuC)vGz~qC6%$Q!#??7A(5nHcN| z_`iWwM%^-1;NU1q8#;O4Yvx|km`SM35702=>m(%Px3+HO$6!0yPd)-s7T+frCAa3- zWe1U{+%AK@Fd|kRnv1KZaChg9nf&tO+bQzbw-&&^83}Zx`p2?2=`! z8cpVz?{HQq*+_(fULGSFdP*^@Sct{z*;K$c9Y3O2YJAwx#gIL7;oG{lzPkzGfCGPG zilc&1jukbaqU2-+zO!P%XJE)NA75A_2gxjiWt3TC6#!s8#YJYtXBM4qqNKmW;OJQl z8Ny|8MC#wb6nF_y?CAsq=il`Ygt8EoxCFd(7g`>A1<7XJ7Ls&tLN41<;eB4?0JPvY z*h~94nSk0&W+Wx6LqCYGiCiX;WQv2~(n>tdNgnVM zm)6*eZ8LUAo+z%fkfc?I2k(R*C4~)K1_7`!BMuc^8xLWBRUhs3(l&^J1(ivy&5M6L zIA(I|*B+FVwh{AnTz~`*>1DBbep(*#oMWBoBWiIRRx8NqdU#$L0xA?j5Q8ec5oY5! 
zG={K7SP4TVD$U$Ga$nopzoEz}I9sNePg%Kzef|%%EAi;tY03g(xA&>c#t{qA<9KbF z!KTZDY{&MuNvBnR+rlI7a=85UQ+ZMpeqU3=?hq)Yt)|V)-qCb;aeVq({Hz{e4Pb3f zM&?p7g9^4zt4c2x%((mnT}{7_FZaq+1*zM}MVa5t+7S?6!?UlMz3yy>=mFp4-VFB^ zV!tlxKW~G1D;xQ*(%x3;O^*rMeJAdeKZA4v#&ewYS+sw>KY6dR2LVq^#Irvula?mvZ02mw7W-`CK;l6>3z(PXd6L@;j@xUDoI$O;#HYf3DUU<$QA$De ztwzbfpQB)9qMV@K4sTTtwe+U9+6SU)dh3;!BwpHMb2jBtRY$T&m25$#t);sX^#WjO3%ku>d%={pGsZ{7tVtjdKgH_-#9% zWMv`d#_`vSp|TpY-T7o&Hu?%P)pItS9QGDTWis~XRQeXItw6bBkPUXZ-{>zBinN&*@yqepb!XA>mg^iK&*9?tKd8W}3r;-i zLY7_5CFsx3gOy3QKZE?xAcxwnU)`zv4)05?kva1Ksd^=371^c+`tJEFPGMihlVGQXGAKl>yIaL4PW^E?HgeqMn zq8LcZ5^E3RCP$Q0dAq~(^1x0o*N?!(`i0vfzz?$q#{sk9m5O?K0h-1O; z5(zo&LjrY3$1t5H@X$S*zgL-sZpDcDeb}3ik62|09}Wj{GMsh&>$&yQs?)nV?tWh3*@`^@G0&-*0Yp zEB-LhT!pOh*0y(5m0iyT!{{6<55)c4Sc5!)N+>S_nzg8l$LzVxs#@^fMhvv02~Z`lGWDY}$86+^kPKiO z={5JMoP44oQs{*B2+}&KnDq2H-i{^!(+=#HFiF2NJ0A4x#?j;kY~=U2z_%5l^M&^r-IG?f@#FE#qu~F&rK$gm`DKKE!wy6LxevtaE}X zhB4{=gSfQgiiuqn(j4LDi8B8mz;zx9_baa)vLJ7erOUSKM1V*yEy{u-X)9F%wj3IuGTLI58{bo+I z9Ez&0R`bo?0ScK=fkITasa_o^Jk3aMEC#a%-VuyOPc0N4Y@?IOYHj2wfZu1D>9u6gimY$S?&C4J=7een$CF_B69ZX{e0f+; zkHAmc#*u+(`8mQ35Ee2Sav428Y=Y$Ks{OGl|5TDH`D6*i3HdL$&z_Bw%*vMcOt_V5 zzhU`*_TDq9$#z>8enRL~K#(2;6a)kT=_P=#6cs^~q9CB4fOL@(S`ZXO=|~YUK|uiv zi1Z>wFcgK*1nE_Zw9rCH29aXOC~3KMZ4%C(m=2d0+FI*PL@U ziJNGYAK%y)s(=|u(cx@~xT)GuuZTV$b>v%257ia9@^;{Y-|uX4srrv;aJx{d%-zT7OldzWQau=f&Og(H1};4IkntUx_>25>ZF>&c5mF9N z=<&p}eu33RYO?jcJRe0fhkBl>Vp9*ApqKe1d<&nM&YmrQ@uqCKRUl?##=lElWQmj2^29Q%Yd` zaq@y04aoIQ-`7!MiAUn%$0rO{mO&ulIG~7m`&7;|Ej!Ru*@?>N&2ckTshBqX_m0DG ze)?rzAyva63_WmAIsORbPkaD1`Myf~AW=UnssXzBAu5|L44qp>1PWdZWPHzlQYz?c zyc%FXCRG@cTQqg@b>!3S^~rXx*&YyF=9| zMys)#4y6@q`8Tp%vrjRncJxFl=^utvbf*jaRRntI2SY;AT2dn9n$~yM*Z;(cP;y#JdoO~bKx+*w&oY+XTs&e zyP)^uom0BpnsOMgOG{#2(O%mC0c9{slI!8In5WDMvClxY02(LUXL%LTaJ=9=BS$mD z4C>^XyfGNo-l*?Ds+nKfdek7EB6=Efa1>RY?|B?kI}d*`pWFe!n?H~EGT@=h<+fWpPw7DWGLfZ z{RSvLXS{^CXuoTCjjiTZieZ}9vuLglh>FhGJm}dHIU0m`{vrFrWGI3;+Ju+h^u)2~ z_@Srrn3tha`X2n{X}k#mp>o}?`7NS3H}bjm^U0roGZZe2>> z>B2zz-Gx^l{KviPXEJ!cvX@2{1e(A*zQJNf2+nO>FWX)q;;uoivGA2aScku+cu{A1yFMniwz^B+l%zk0Z{%7&lwS!+d)%^dQGj8c?OFZ zRh3JL`T@}WTuU)?IQ>PT*H~A#2MsEKrLar9eStrAUzlI6*3%(O6 zpsWk_DQscI6q`6V6v-wTm*HWr&5{`ClW1ULPS?xIa#nyt`hqAZ$D}ur2`cF)o&9j$ z^<&|xGPCueW(~-TH8=T zzK663k!RuJw>>So-+mxzOA4ka&M$+mXTm4S6mPrw;IE}9`?V$mL9QTdr>6aRefuAy zS9Q5EPY5j8l)xksxU}OrjznWhezLdt=?z&8PlobJ?wCqdipk-;`m&zwL%|>AFwNlj zdUe%~&ewbBy=8TjaQTBn+`6aaMngs7AKj~u6geMjKzwbb-9-b1>Cw_aS1lYvEPT}+ zsWPO$eUz|+RxK_M|73TibP5Mhvy&v;okp9Ub`m$Qhj%G)DD-Uye=ejLToaRR-s=5@Cp_%*%jq!W= zf4ILbk58Ki)s17!PPXL>ZaWXi$h4(20k8Rm3+o9wpMgdjr~?>8R=l_L z;X<1lrVmVqf{GiZl-E8<2JPIWd2F6Tee~ zs#{s~h2xy(qJJ=F^M`J{9;|9sDg4m+^B{5#4udS0DN4tiHxk>@UGFm&4LW8?B_xHe0? 
z)E`%2cJDJc?9`_(=iE9(O9vP9(q@16?vjCb%arCXD5k{CvnKu&K$c9w*`nT@yS)kV zj3S9zu5y6Ae<)SLWr}LP(CbQXM|u`IJe!5&| zo6$V<#7+6B!WQ%^h+gM&qIWDj*6UoM21+Uk;|YqzRDbW4qZW$P%>Otf3h#6P0p>q$ zIy4@qUsy?hAV+~sP|ciJaoAi5M^3dU{_>V0|L9x8QRe^YYQ+!_AQ6) z@|*K`bB@W^l;?_s@;!RL7DcNjfbB~$jA}xe)DGVcW%n0{UG|!#JKf5PYZ>Md0Ob>( z9R4+6r{<{9&1)5FLWQE!Fo6zbQne}UwPohtRxesxuLoMs^UY5g>9x{cx3;Z}cT!6m zS|?WDS7{*T%z7}su_^}{xUcNZsSo+%)Yg}beu$YKfoH~FRLO!(@lwOeN7I$Y6ScAj zLj^hGoK1d2x#JnrJWVUV7ZNHr5$#wlKB`^2_F~Ky0v6!u!x2 zPvra<__=(m`i2PB>{#_PaeL{ZJQ(U*ZtZWlUG0yvQQBVH(q!H!DB^E2vrGGQ7$)e95x zmlkoeS`6Gl?j26x*8<%@lePQT2^%L zKD0RpqYLll%xH)+mI4o*i)AQ8&Zp6J%Tj=>JxKV?0Ld9?S+~2f4{e{daJDT4Q)&%f z!pv$oc(B`P0z#u8kN1}ONwWAf&R+S;)7`V-fZ}W1_Q428Y{duc1nbXUDu1|se5LG$ z{@FF~O(S#SklfFFp0(ZC)%+~NYJ_~mil1jtjXenPqgmar9$9jr^X!32*#54qr`MN-rW$Ecex)m}u%Gw#oU=c_jplzPUcOuvQUWzh2q3m8 zDjRDbkk3zTD~liBM%aSj37D>ser=M5h{8QPdKqSRJ|FyV7L#_zdP&cZn@rDcp#oV&3TTA5!d4`8A2SR zd@{dI{k}8JI+oSzCmMtz#Rv=P4mNIr+U#KZe|cxiv13bE<|>%&@l$=Fl5xy4Mr+q2 z-Rpi;Q|{$9YFEoujc-s%gQ=lKFilU7I{49owV_U7f7nb87TA5T4=r>;LGlb>Ro0Qg zM(NKH;!%$xxf^?o+ym29M|e_pSGEX;JXmY8(=niDp)uxT8muu*I!6A$ceBJ>K)W9P669x zbG~@{hRh+UkyFCcr!IgeqidY#dk|sOQ^U-?wu5|c#QIR+q-aiEO}t$h7(NIzwxuWC zrRlaGWya1L2eY(I$b@CuYnh`#B}yVpl*I=Vs2{gr8Y2+5;}4c?0a$quYur9$ghMZ2 z!E_{%c!}fJ?I{DlAFc4kvYBm(C+pOj5AwM(iKZyr-_Y0t$e4(+C zm@K)hb@?26a}H9onthVVW`x}fFz;TIo5ci>51ipR5|`@Uvf=W5~dotyKz3-1Ac^>eYgP3&53S#|4g>u zkCQ~=mu9-i-wS33th;P|6SK4foq>gsA{!qX16!fTgv+?k%?wBbj=Z$iCYW`d6|f*vFQZ+twhpfuw%8LX6<^pasgQRdb>v*24xIdTh-Mm_n;!DC;aZt zUoitVbQ9@*#Pi;i#Jd5BV0}S1=k6SF&it^Pp+gD$ zHuA&wXX%Dtxi_p~AdjVA|3`+or}liMLR??+y_8Yj<-2re8Sazkpd>>X5up%`5BkF$ zsNG)dzz;gtETvNnHrV%&j@Qd)$xx-_mD!_elv06OgoS#FXQHPcHRV|r6Nj_bojg<} z6#ZZ!Ve?IMWBt^gI2{i@^Y$~UwX*L5kJCX$3qCMgge;4TuY%GnIkZA!+ zv-Hq?hMr!Z{0u#Fh_?d%=Yocdb()$F_mD?lAa^&J{GAk*mEC%HfTiotg8V%gzY>eW zJ)=@s`1}Vr;6=XjYtic|f1VQSjcqpCt!e#XC2Kuv-pF8OW%M!vC6#t0RmP<5W z;aylmZ~juPQI{^P7+b}_B2^BS=7c~7gm&6Mt~Y~60QTS}{{EA3cEKq; zwtY6^D-&@&dF~I}y?}Us*CqafM&(t66fappn;pR@*pRYaK&$cu-0m6w3jivC1G^54 z8xH}^s5P=yXgs{Wv;4}K4p&&WGIKDLzU)T3>Ls!1mQt+IFX`{7C!)uc7(X6hAzaQXn`+r zfO?UAz?>M#ci;Mu_5p4)B=a5j^$IRYN+A9Is$3Aa9&++V0zgUt6L=Bay<^U}2Os^7 z2o%OWb@n0S=}?8=Xz(Y&rU$#kMh-tf&G5PjR+WZch50n_H8fd70Ftdj_W&puDyqdy z7gxR-jrfg8WW$IY7MDbk8(`i(X3;5!hYR9JnCUaolu}NAK||#HEOyua_XW`tv^cqZ zA>=%m(_7~>SdH28maWlvW(3{`QmkPZ@R4R-O;Z3OFOwjqYUr0WJb{D-KL;DXAb}Xv z*W8D^`}ZNC94)wDsQ|2#9tkk21%cjww}GALRiNI z03uAWk$|lrKcJX;hkN|ZnIJnPiG>hEc zgip+MSY2)IS+%Lk3|yiK=wgMW_|L7jxI9N5kWRnt$}CK^p?dr9XyNEHad)r7~ir& zL{r*QDTg>KqrS_t*?aVWp~`|&4#B_%aQ4OhAppD&GyWiTQ$s$CJbHHQ;Af#A4HG)@ zh1TZ1QwO_aA0iYsn6SRe^dlh;ydOplNSVV&dNmpaoEmYm)M|&1uy^yRgTWoVq)l@e z`oDkv3uACt@w&&wuA0Inra1BT4tzp}3N8TtOA1Nh^k4-()ETfkTY>HBq-j@f3ebBi z7tyw>dTcS8QNc%p7Ou&5F*#TJpI<`qQ{>}&ZhC9#=hgA@D239XT;6)IH9a-BHs6K` z>}q=Iqm39zV39YxzK$F|a%&F7$f_3m$%(2)=v1f7amysrXoY+7q9T-Ip;Tpa(92Lp ziU0fOKR*WW&~31hPSufIzz(xuyJ7E@H>&UKh-AB{rlG{?%D)XViXHkgV+quBa zIJvx>v)i(CeTGn)29=029wLb&b-+WNER2=w<+xmQtO+EH08UzllaAp zQg|=YRsD?IXA@0NnpZwvPMi4UmpHRXOJG4m3@lZ|C&ldfBagirwR>aoD zlR{<&X+0O|;IX0RpZ@9h|JgPF&;1`<9K-K)%_Yh{;ur}pN^@x_(S_~b?n&C$8E=04 z`tZzE#{RkfC{v@XP1c&4P^o(;uLfaAx5xUtfSl|oibpbbILQ(X#>VgW*cgMcQ99|d zxZ<}LyUY}mHuBQ|;U3_KICHu4_sIU2epv$TKDQ5Ta24%C^}q|}Lzdq|?=t?_hsJf$ zF%V*?3b{w)iY6RI;tF&$luy0uTWdaTv}L2joLf$90-wvGdJIWM5|_cztYd{B(;Kg5 zF~u9Re3MeFDQf2o?vDvSvD9#Q7jjr{bYn8|lJ;z>LrvSgQNL>h*o4n+6Jxvd=xJW$ zb~@DC<6|Xg&Uy0TH}5=Bga56CULpN6eeXMA7cl;AFPsaG#qy-CXhd`AyBlUvF-%lG zyj`B6`qHCn^}{XA3K6U}^S}GpJLI-e4Nw5-0AvD=OHOURboaORZ$97pP#Nok@Rw!& z6cFAr7x29bk~nyb-+#XK*68OrU#9o)$d0v{%c_<=(_-S)>$6}sz8Z1gOTH=n>5YsR 
z3A{&ErJ=*H^0SbR58Clo@lvX%)k?>hl6FQ>Kkx42SQaaz%4avqdruxuuKlGiUo1q1 zoTx_rV)S7&-}j!U;LENzc+{x}4N$p*?CW2WDvEAOGq)nfOiI$tN%;b8-vS9B8uqvS zVVFQ&?{OLb*G5G23!gG^ zH11@LA>ClbTF%p8d>UtXm>$qfPj0~G5M0QaBm0mWns{!O%&-rM2FLA{>_c5{+Cp__ zAyi~jjPcU7&z2~4tuI)tuiwfQO*znhyoi9qjRt=GT4_I`S%yDR&R*{6=|AVcnZ=sj zj9!zlMGqnOcuJ=|jTKA2D%N~FBe~Zf^Y{Z;X|NTa4*y7;ubLSCj%eb{s$A8v9Wx-I zL)c?rWC?iEKJ+l|9U5O6w+Dbd+05NPu(@;WLdsyyrNOp=reEV{hu15}pP*lNLGIJy zLfWc$%*hS)2Is?wJI~R51#1{vDAqUrE}O?W=;uB(VZVS_UjiUC2;d&B>yZ)UtB!@Y zPhxLZ+3q+jvr_8(s_mFDD(#TPYHpOQ29hXS)C}JtZK2*=R9eEF($9ZGVPAc+pqzN^ zj?f>M%d|R*M1#n3u89WYuz794f4^l=Sh94mJtg=ZdL?onn#_au2y;qH%811pqNB2P zABTh{#dTB!TTRM;IsIYYx2Pjq=z3FhgR$=2G`e1&tt{qkEwNwc*%@@dVGW}AnjtvM`08ZFb-@{THa`%Iw+ov_L4=iEefD{IRw2QAB%IH)XA;Gox? zbx6`G>&*3YV&6HsnG~?{22*z%!=vv80$mzkYSXjmdS_p+cW2&OPZ!@?%(1WY#;BRk zhLH_XlWb*sA5}P82Flw^6{>x`6N70WK$#JWHsf}P!1~l8_MyiGTA^x2;<&1mgx6o} zJ!rDea{E4Ge{$DwmRZxKTSGD5=6ds6nV+aL)<)7t>zg1|~w67d|j}I#bmHYI2qdbs^)q+LA5DOsjTpc_Km7L-?lC__y-KNR9@;(^${=E z)Xaltbd_eba}cbr){(l3)DdB{cvSzJ*;_a?5d~4sVz}(1;$G zZy~u!<3q##VFW|T9Z!AA8-eg*VaJ(zaWTjAAEn!VZ;WoQ8K34}U;W6fUW`{yb6{2F z!B1!l>^=NG8@c&tSquO17c*t~o+$s*Sbe$|?SW?>Zq{Zy3XWuA95@_Z+(cExgxEB7 zve=Y?o$>;9sy%Lx2<(&*f>59sx3hDwO@Id$mBo6|zwg8|4yV~m-3E;`NTYg0_owg% zPb0Sp7YTQfL|%)K>4bA1c7Cj}KyF>Y<%0bbQdm|0wH4#$sC91x*PW5t-eT$p?`LLJ zc@yCApfvchk#gS$To+>VMh+}zIy~-zeXVA(_Tde;+v|VS9O3q$Y`9)*9=~5fr4XGc zH6)DTPcwHx*{2^T74p+w9tx9y?P>(C1qK=Wp{j;JDH3cRAYRwOj&wY*znjo9zM=Xrry=#N zf4M>T@f4=09GA4BDxRrhl?~&WvS%=*vlOXS>%r+!qsl~nE>hlIpdY$N9_L`CpA1#U zQ=wa407LDD+^{#_gZ!JLG|qTUGBt?3{n>!n&AZF`?LIaG%1pqx-0Yti%~#B5)#$Xs z<6112@fyoF;#H>Ykq;qn;t>lBqEEDfr1dpVfmGQU?QW_nAQY*%#C@c1Hzw21_bw{l z@cH|a=<#@ejZNB6@xM7iGaBBL{4uN@^o#wjTk2))@j;(nd9TImw1}C|J)aApA%0SL zm5AfkKqVUdxYFT;nN`t*y~T(Tp3j6w`R%>_Uib6OQGT`H<*$jJsvu#kJ4mhn`wKqB zM!jh4>LQnw_pG;;@kJyXu8NIoVIgb2x+_~b5uF!%_GV#Nb#W}P{w8!u4J^d+d}#xR zs+x=!iN?=9sHbR!Z@lH3xgVVMEh?ZWIZ4Ypq0DSGda44aZz2J&*hWwC7EYw#D3eMl z(x~J5$FF)av~c>WO(8FxeZTh{P`pJW4PM3k zHTm!*{Dmo?IuEE{9V0(JGDgJjLwYA6lugDf+dIWP?@s1Cj~tcyTxgR+03aB6KlH|r z>gK>PkdXcRbhH0#!`*jpg?;e)=vA=W>by~TUQg{*=rj!qioXQx&Z)U?IJHk>e~Bho zz8x=EoO%u0JPeyAoC$va>pTaqOyTi4hol)-)}L`%dy=@yEL$f95u10n9Dsl1mK^HRm8k{B4n7*nqs|tG83D@BdEr>I7h|wmZW|X>slQ)7UH|} zk$NPy0^fVd1??DEx9I0-?TTBG)FyfNfQH`?2GpQ`d&gv+&$YatN%SnYGJXBYR(N#6 zf{i#j7104NVC(QFdf@L-^ZNbQxx)OBJhVc@5o3ghLJ=;RpRFG=!eR2GE z4&)k};6Btc77Rk@z>m1ruR|!L+{6;Uf=Ol zl6-s=d@C!Gq5}|F6zBsOc$tFhnyp(4T9NW-t#-d#@6)hZ$caG}+y=-$(b)Pn4y9$g8NSPPCnLBEH0C2~VvTDrB);!P?fQPOQ5~bOA#T@&b^7<)H&hHXb?i z#dmg}nky{<(BS)wfvH{`eKk0b@x^!j9#*cd4>O!&A zS26j*5&s`=S%=xRg^J!9j7ku$6uxQ{RqW8Hn3jCM)}ny%B8z`Qg(T%0<=nYsYxT}e z`s$z=r>(I$L~h}qpIar%|2v)g(chmgz8L19kpd}d6O^2EqCzbWxtFn z&ACcF-13M%FF>}FKATP4zP<^DWlYa0cb4dUyB~P>de7USZxE4X`g<>C_fnrz^prPL zaQ+|%zcQbN`ZH_l@tlvbZ|^1mDxf#oZ_&H?Tql8fU4Jse()23#3BR33?{sU~AGNGJ zhE_BH&=GmKwFij8t|vA~ZNb+Wgqht*j;*ar14X2wH#_gg3h3eGx&g$3;?K2!y?@}n zWIK;_Ch6gKZWX(ajLsqhLKOfKv3A;r*4M#=2j>E>%ElH9W<(9w>tDNErJGG#F4EQI z^dkq@ELkgg*jde0O)rBhrW!2A=dX2{0F;9QI%%pTphEh(+KOHevFe(nZO-E;h2#K)dg#tPp{GTE}TD5qkhTdvqR;oCIO&^ z5M7Rw8vaKO18shu#umS_hwF;(w^#a?m(}}|@f!UKr~5ftPru*7=t(z&2H$iq(Vt6; zNkU^w3>45uPE(2HIl8hxmt;fv-tcSCbx`ls$(TmH?k_Po`EzTqDPhoXZZ>MAKoP;} zbi^+amDEa%X)j@(s25;WgPD;Nxrj^Y!|TCpe3y%#;4m3@DnX=B1!j%Lp9$A^+r0HygNbpZy{sQp#1<- z4J`!gH~IZ}eik3VptE%0coUZ%jU&{3qud`OCQlqq_FFbQ{Fd!`oNy;C@TC8D-J+nT zNq;GF(M<>_jl>V8GNWu(n$8+=4O;yqZj@os`?9FqsqdEP7qfoXfkVe2W-Bh@F~Sh4)GPx|Q9qsbu) zxG$C=pz$!fwgC3QJGV^)``|Sj{TZF(dtWP}CwOA@HMI*Jv)ClIAKH7-J({Ub(fnY& zw3gwiUiLD)`4axUFM`eE{VZWxDZ#KR&~5b2lBoOaP1QO7jVu!{_h z&ZKI=&p6j3>OrrES(oy%R(rh{u17WwEuhG%EIFp`m?6LPYJ;K5AF%;KniBBbzu~4e 
zV}$W(%!#UpKNDU)aZHJBdUNY=ChI^JYtO_|OpTC0#Ight?_J&Q*0emhNGC`ql>B0H zh(AWZG1WK4YbU9-_|7t1#pgG53jazz7VMGhAKU%=Fxpyn%bAj>_f-9L))&FyDl3aF z4rzAQ#Nn0wl6iwG2ciLRfwtXlpla zFm;i%TDMZLR5_b#Z9>O3ZoI?4h+fL2Jh`*E3r%aFU)}-c30b^EUXl7Xw*F>H-K#h5 zjDF~8BYiy*n)t9TX=W!-%g1%fH-Ph(1ibLw@8k5v|8tz)?4&y~9vsrF6L~4HFTMn0 zkt*Pl66D^Y&g|eLQf0htxr-!|Y|&!?M|?4%yL2PiBX6MD&r$pt^|QetgARt@+X%aw z8?mCH1K5XFDir?~>YbjS8}1a_i!69U2!9RWL54yG#MovRhknYb3{|VP2)&GZWYI_P zzw2V?Fr?WrcEUHQ+65#L3Iufi*PriYs77ORtq7&FhD4KDSsJ;baoOE)?Qrz{<~E&8 zvQ%6an;k)%{Foa_>;Y&|(W4V)b7NKXGRgcyiqJ)Tk>l*hbJBOV3tn-!P-QJX=VB}8 z;xTo-n>9l}ABubauohUo{`?=vVmjFBWy%dSv40EOA%Ct@~hj-;j}ww47x$CxgO`*u#RcY^X8g8D$PFl|!150#;JwZ1vUwo!Ziv=mh_PUm0TeTJ69h_d` zCIs^i0CdG>K?rS++v}_EH;vBmS}fQ2zImta`9}$C9s|hT1YOdBB5CeRp*q3((D3%_ zHvUK1>Bq+^#Z1~l^fUl9(0mr+8b1tM6rw)(bTefTJ%@YWgyB7&csS+2wR!Z8={Sm4 zuvnW7m>mKmwACUWF~gNUYB>{CyMSwWN*sSYK|Viq{k%cHyQ|xHiPOuWl9uR$!8o;< z#pu1AxZWgmsE;xrz%Y&C4|*l4o-+&D6XhL`<*L!*Z`!=r1ze8&SVbh1 z<(iT$latKtgzuEDG9{+cQ&X>LQCs4Kf}{ys@)zV%Dg~UV=<9R`%faK%RdW*LOmAE4 zLvqHXTOiimkN)o!NwxD|0SCnj$#Eab<_^^W(S*NmweJfXu==3CuK|g_JVz%Tata9y zW(1{$xUTUDV?1K*IvYR*w1*;s^s z8TeF+S>4Ed^Fnf1=#{~Z4mCLT&ZZH%DJ29!LJo;OC+`gfy`tmy%AAh7qz&*ap=blW zLj)BpPRe(q1izaW!sn{`E~&#R=i60K+&V9Sq&w0 zaYa$PW1dznESLqlOF39bwZ9iqS)pwfD||AS{COW*Dx|dPf3Dkkj2@ER!-T5uPHKtU zTI)DEIv>uks~npt==5`w(AU)0dJ2mpj9ma0ZHwZIFS4H=0v4^=dNmGL9rM5D=F2pY zljo6#g0mOBIxu=8qj&BPtOKx&`Zs%{^B;_t)dp;wYh?p!osAgePM@@#hzeP6X}ID$oxX-V;-5s} zU}_=f5wA61rS492j8~4d3hMJ@u{KW3#GqbZ9Uit3PbR*vwzo6uceeARQt8m)>Ob(G zxtf9uKh%7pw37sLw)izhMz7o=&fZptdy&P8ZVX|m>0MzXK2R9E*1WZqA2TF81yr2W z%zG|pa3QSaJ@1At^`QNz{nu~$iStTkd+`1;R7HZCF@y!v^r7qA5=Ci+$Ubg5&mZsf zR0}U>G2=OYU3TW&L+{eIG9+jaB}j{IDTUdS-defT_p7eiOt0GS~~Dun#vpmOkB-LYbmJ zuFtd{B4JO6ic^EU&O5))q`w=Xe>>2S#RJ`qTKc% zs@kc+G-3Gub)0z}#~I1l)(1Z=gMYgH((jDsgW~?a6Ria}+uh{bJ*4B?>U;oFRrfp# zSyM*(kaJQ1Tis`)oJPAZAU3#wIj9H06h3Ivm}`!y(7Br3{z$?;l$WSym!Am8vr2ep zKY0DxITgD$+Wiap&b<`BC$>xD`Jm%jYD(_`B%jw3p9}aqRBNaV*8Mp z5;PX7ur{3#<^^I#rTvQMrIbrb>o5}h2+6vUS>%adH5jkxt!%xnLdEI^kK5Q4gZjmb z4E&GictMLI&2SRuMM^WJm4w`7Bd?8)jgIuIF5s`N?m6n!bjh-OnnYKNgcog7FNrZv zfqpwOrR}D~_LDM&vne$AHvCp4{#!t~X!NY$o7u=3+2Dc;5A?;j9n}l2%4Zj98;rCx z3j&C+@L)Ujl|mzJwuc*!FXK^Sq>5cu1siqg$>kVTcH z<Co3zo6ss^MiI9hkQqdY-DZ3SR0j+3nk)yZgJ%b- zgC&{lUED&#PBdsA4O9nB0kv=GBZ#|(BFm4dr0!`vglp2-Bm>n!*YONfzPCFGXFY)G zz|nOm{6u3Q=*Zjm*W(BfFn>9XdSMG2CNLYFDNRlQt~^A>T8mNX;8s-gX;_>~!JQOy4`&y&hKZ7v8M?Y7@T-nddi~&B-^O{VM zaCBmDA^lZy#^ALv{9%^3WZUI+?9)vfV-;LrSGH4s`p9^BvGG=pov%1`OcOkM{62uH zO;-HDYIyyU;<&G2NcB6*z+4?TgWvyZ-BEk@{?WR#hW}6Ga((X~5coe3kN*es8C>_L z>UIvx)-7Qv(bu5$eF($?#{P^2SXmTy(xn9%^isaRI~JK#ym%EXgOJTLJp%TtZ!rR# z{(%h^=}R>UUr*xNiwq*~xW(s+33+D{vsi5#ueqw#R=e4Ic1xQs58DeXGaB>Kb$==d z)$qo}pK+Va_~AAQ6brX|V|4R&4ijkn_-Ip|CuNzOi(O6}o6etELLCbpt`vwz&8Y!d z4SriU2ppE$Al4Hj4r0mrBTtU%ob;>@Ckz}GWJN0k_<>k%Sy z_plz*JGJKf(DMwHc>8><*P$x^*(WRr|5t$B2b|r?btI2R%NlR%O0K;glXA`Iy8Gn$ zr>a;lZNcWoA%1@iRhHpPOPE=|^r5Jc(jTm9teR5xW?iS39NQ?J7V84j({Tp=|%=*{|`LN98(=dJuzwZ#;$WXwP&M zG~7H5OP#FaYJAZ+J(M<6>roZSoWEt<0i$*hl4{==Q8ryYo@2&m_{w85m6Q9H4Xd&e ztJX3N6fhH$eRVG7=5J6FI!aDdK1A!4NaMKuAA{8y1cxHB)P>>3^dgBnBzR!#lgUOi z57{Ma+v9a`sy^U?)X_s;Yi_vq=&1q}i0}yp|HpVl|NjL<3H=Knw~8%d*Ky#UU)_57 zUOD3Uujv0=*j2&7i$u`*VeycG~8`ZbjZU5?t?Ncv=9GjsI|a$k~_vxDNZTB z_*O4}hy#SmcxEv(B@UWX!dka!QQ50T?@mZRIb2O{GCyid-{j!;C1B*AtA75OLQ9*o zKhb{|m9uKpD=Qsy*h-`1_OZ(nIe_wo3@7Tit+qqHHFGD=7>n-?)OPBTIpb2Pnc=Zg z2%BDjqJ5%P$XN0Ap*8oBXbC}2K#XN|2Kj*LY`C%ad=_O@i$)$ms=;8Fo;f%iw*|EC zgrCjHe#F4sp`UI^_GN^<_fW4pW8kn#-5%h#J6>6`AV{-S{be+RYy 
zuE+(#P|k~eK|ouIR&VRTKTqYw1+dRaW(u)&o}9UQ*(mzs28X&D5g@*6&KIm!g*mH1Tccg-vjZ+x(zmPPxD+Jy;+n@=Neh37!c4fVvC%;D#5Lfuj7Q9HM@&ip z_F>K7#nY{6zCzlh*FzvAHBbDFWgnVFy3-$qxjzQwy&71)on&dc1eA8u31?2Mv;Zp! zh{=M7M0_dww$4m|-Qs*) zuH>Yv^kVs5Y&TQz_Q+2?5C(QcfFg8V+PnzD<2hSbk_FL0$niC6M;Zz1x3RTwEnbJE zM2e;660k4_C*w-}VVL0ehtHSsW!H4Piq(UDG%n$-9p@ze>0MY*J_8qhjtd;#q4N|j zvE7TH4Wp4)?x2xW@N6R2ZB0P>P^Xk}`&v<&oq_tzeJvZP-xT8V^Z(n)Vie4KZg(r5 zaCPSc6qLJMqD4F#FTC#)NB`vU)MFW~KFoxR6?<(lq&4)z3IW!OUQ6lFI*Ifaf5?Ty zIv5+=yS+C|f6+HjfYn$hebYcgk#YO#J*g*AVo$bTMN`|auZ#;!b%$z%FwB%a13F2A zTeelJMdpGPBA;~G9`T)Ve(U8mqwh^NFUt27n2HcSF@-6C&Fq%FQYy?pAM9E5?&Fka z^r0F^rBy3=0mD&L(Nm-9?2JOBY6Nb{jC-ZtR28@<5~}!H{YhxIGRiRMeN|N9G-z2= zfitwx)^K3EwsGF}QMz@C$FW|82DPazN#&JR3ir6Ds%mo?AQ4uU3pI4!8F~?ew=Pd@ zc^*|A%<@##q;>gmT2O(+uOHUfaX@%#jZ}W+>;XM+#lDwH+>24NT6hTJ0Q?W^xTo*q z3^ohd!War}Te?kx`0#ShovU#nP!^ki7`@vI@P7b%f&b&u4}o%jyY4i5o3+m!kBv-Y ztl6i$-Nbo?S%Husi;eA{Mj^T8`IM?B1UbH04UB@Kv7%PBThj>@4|s6~p#9)J#84iI zo;$h^#XA6@g3HZ+dQf%JMg7x=V*kr)X%@rH3q7P<;y@08XtS>XwwUTbyvj~pU9>-S| z4LJPP*NG80Q_UA#0jvtkpa#qR8e@+f6mjtB`XhwLQj(xKB!Sj%vwi5vOv^@VC ze0m>%&&T1v!6&x{&E&meprg3`LaT+199ZmjC2mE*-9EfkR1lC)6!GrauI1BTuTr`+ zszaP=9jxWcPlE6(7P#!n<2=Wcz9jsF)D)$$c|25|jlthauQy25 z*$?Fi_@uq|nTig<(1S2#bpW((92M|XS?wve#773#E!4x%LB9v00^jiwN{H1RlxFUh z7G9y5vhDT`E&3aZ{!P~XcY*i+fBwbX?yX(a|Kg8NSK<0t8G|9_XE?7Pv+z8{RuE&N%6T;L9;|X=h1PN|=~!3d`HZHH0jr;89r}!#pjj=UAU0G^5zpvcHQlOM4@86N z7?wBq&Y-67O#8<(AZmRT-D|rK9jha$K|i``m|o5QNSq6<^%^zXp|gqVd9i|d##4a# zGtWmsv7HL2h&eVLScHNqMP20`d7JGsS+n|qH_P~y0x~dnPJ-wUkW)m~@Y=t;kg8`W znL3Zr-FDw zxvHt!GBAFBIzeVEffpzPU9*5@#43B>%Y9GPZ?tS=u_7s4se5OE&uIn1CE0&{0o>`o zyj9Ss(}OH3@t4+EQEwYrUmD6R(v$}^DK-BW3@_uf;tCZ|MlEmx*{0Kumit&z%K@18 z1I63s{^?m)*hzc4GRtzI^QFzZ``HItwTKH-DvRFtd-TjA383=`*CkF`%a zy1rinRT>`(H47wdTj>8KHY9NS6=4Q9X-7O_dNrTa^>OFz&}#$HI@18+8$yu6!`qj= z-WZD6HWhtlkLyhI+A*PXzG3f?d$sxZ&O{AXN8t>`lklc4sr@xfd&TdJ(R*)7|$gip`O{WQV7 zKjwK^brsO97_HjC`!{aCGAn|tnK0LTWZcB@q!_r)@AL2VZQDCHzo}{wdTzbvis9qS z5_jkQn<^vEyH#zb>(Qr63hRa4ciyOFWVWBnP(I77H+@lLceM%d{8%^U>QBHk41smO zXdQG@+d~`g2W!2G*?|3ITUV48UO7sFkC^9x%`&w!<$G I*8jf=07cFD82|tP literal 0 HcmV?d00001 diff --git a/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0041.jpeg b/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0041.jpeg new file mode 100644 index 0000000000000000000000000000000000000000..7159d71611ed80831cd489dbf1b17abff8db1508 GIT binary patch literal 641490 zcmeF&eOQxMwkYuZCV>EAP)Rk|Bw(Qu+o=*@Yyt#=Rsv}G(tZqeEDBS=X_XK_2nB+) zI>W>uLaSwBhy?^^Myah9tfEkrKxZW3qhiGmBxpcN5ecBk$6d6YbIyI9^T)mS&wCcn z!}t~gy!pL*uf5jVd**J;-CMF~LuT@OgoK5?MC=G5j)XU+X&as|9ag5*MeVPLxXD}{45LpHNmGhT(jZdJMigD`{nn{ zzvjqZ8gYVuAH!$ze#XzglLmj_9}GfX_ue4f$OdpUfy0_>%6^$UVdI8Kd)EayuAH>>*MDuBI|djWThr2CZ)N-E4<;} zN!i|C-pMAoouj{AZ-MI<{`AXT&i{VTU#}D9|NKEFftw><-a;?>!C&6hR(L(**XtCx zriy=g4F&I%R{YEF|7iZ}hkm(M!!_;K>(cq3Jw*TccWU9<=GU)R!Zj1V{%>)+|8i~r z>-GHYe0YdS{xvTKuP6Fn?@j3CMd|d&9a-aL)dfDgy`Lsv{^XL1SH=%yLp1;3;e=PDOa-*o`2g+LjN_kz`*@)W09@Rf5svfT>sZtMA3ek0ZZZQv|q0uJI?>!|NQ&^o@sM8 z=6c9SglS`AYr|mL+S=MNnRW}7I4*E-Sg>@_;)P3|U&eiY`Lg9MycI8b@LUCMF3Vr^ zeo^q7SG>HuxbD6p-_@_aw8m>S#k8|qV87rwN5|(@^Oy5i|6hO1U7iM{j4o zAZL5$7ay~8Uw69cxtR7x9zHu#_u4xwUB-d;v(i(r>LuUbu61s{F9Z~veETzIIY^pj5yANj28^NPx<-+27H-5Nzug~0n|AA#-@Zsp=G3)rm8AhVp8w;P=NidkJCaHCbZP~mEvD#;skxiPkq#qyF?}%! 
zB2Qb!D~F{E%EHOEB)!dp(!}d3R`LeIH6>Bv&sB@c7Lpg)MB6b|Mg)B-2LTz`D7ATo zyh2kg)y&jTs|3_SHaR+=C+&AsENc`k#zbwDk}CcgQeVA8`doc|vxyF*t>woFWXwYC0j zQ(cE@?FScx@-^O}rwe*cZk;0^1^?;kuAtFdCR*ON?p^`6^ij0V{chP&wx%CuE^{}lf__YW?MpEKOPdVj08 zN`6!Iaz_*`T57{4=OdIc!-1vbJZsf!iKaf4$HsVf8?N)vomyFC*jIA5gGyx~`7!P7 z4{UYCC;uu-+t1$_cG+=~e`#IdC*SwTzP!6;h3733gEf%axR<#td1Cm1=XW2?Y#_3< zlj;w8#{cohF2XG?-$_0npl#pxa_RRU{BY{e(S^jgc4Q!V`R)8csiV;A_>XV{pRbji zI3JP4rnbM}(RG57)X=)t25xtA?5jO~_$+mkm2I;5xHF^pPfd+KzVzmSlO~_l{_m;Q z3{Gxax7Rjo=8xN+`*igCe|qsst-hr0osW(^m$LT2<MOW8wt$YWIMqWEEl_th*H<8wz9yAS_{ohWvjBjrkR ztzPw9-&c2n-fdI)4qsl{95nTPO>y04+SD9>g_P$TW&dt&BPHT+0BDZ*b$tD$+n7W45t#)3QFP zv-#%6Der$cktN;=>5_MPWZc5dPF919;kV0s?)9&_P<)mA_T&#=tG+uH`LAql*ShaN z`{Hd|x5=R3*l5*zrpBkxOHy5B6=eS#@bK6`L_^|2K$(-(Hn50yi=e5yCj&)w1!q%#Oli#h(8LIP|8waa z@%m?c?Y69$Rk7_=H(pCk5H>S+$CBDtzRW+HVhFTo^QQiG_R8z$J~^$baMMI~s9yTh zmkDpKJ3Vv#J%)Lz?Ajl_wtupDA2X?qq6cj|GJA&lEN8ddpO1aQ7q>8<&(1iN`sCaW z7Zy7%o~yarrt(^C9(nv>MYZDXI`;?M-gvIb_N+Ski|7Bmx%K2b8#ZimDxK(O39pjf zH&-1rET`E;8}I#|r3*N>$L7u+?e;7;tpmBS&+yNSVT^JiW8bcIk5hyE>Nq9C4n5yb zIubp}0UeXg6T5{eytRv`Gex|aJmYWE*PhvVweOz%kaRj{%Eb6Q?%_kNHc_{KuxRx2 z?_Rvs{_)Lqk1hw*c27kyQg1%pqgrhzct`7)nzya%;J3}ahpxD}2L9Nqc7^D>z_?K_N_m!+dnke<<$1onpaBW zR=nXU=k)SFe1BY%_`6^`d&a9@Fr4nsW-H#ZbNp-U4o;#x`i@F`eqhqa zH;#8pJ8Ce${pGPW2dWHlS4w%_=(s-keAe@+YnOUx zGv>%p_LfUK6)UKKv!0$8Wv7CgRjwXwqf6h6THq+(d&TM+UUtRpPE)~$7e>``UC_3i z0oq4@J@!e+l^2a6+b$%IwIzQU-n1aFrJqBHqI|2=clGYLCCtW z)3nu2cW!GRNphM_UJG1$Xz;?HQZAp~Rqft=_o>OYfBDD%8v1j5Qh(@$is(w7YHnyRhx^kdmb6^5{lG zgb7;*SGa2mK$!Kn+X}*_^;I9_stq%$xW5`*`I0m z*=qie@+S_*d=pb#SN}K_#q92$<&7qH2W(Kp|E>9hv!T;}eeKZWH=Zr>`aG*JKlB#w zcV7sP-rn$=SkIzY4&7Maw;<;0iF=ziI|={r$@ZuP;-cKkZZBTkafR2j|CZifwtqU| z=I{G`GEd&@|2^|e@e6-`b)83u-IKrUyZMiEq)2<{`TU4W9n*^RJm0Us8Ls;H!g+^p zPyb_|)9riTekB|G@CP?t<%!-^GBS9kzIH|Xk9}Q<8~@UuzqD@)XGth~SDcrZee1wW z-$UL+%_mRP)>EJS!OXevM*7;S6FnO%zW(sY(%;@{@c)=W8C}Fik9bEaaow3wkCpEK zc(-!)_g}o_;JhSUey)B)v zC;h>PYwzCd*ks%}_QH|AsLeYU{muL2*;da7zkl-dyM1m8GpGOPy8rzS=GT9Xey2Xy z)Y($FiMAtw{OtqJ_iTTp1$*vXyYy?TUHaR;Z&zGD<$mtc!WCKZQGaVZ&+cUf{=R~1 zKK|g@75dx{b0c%pTQ_27ASxMD1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2 zD1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVe zfC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch z0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2 zD1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVe zfC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch z0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2 zD1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZVefC4Ch0w{n2D1ZY0g9S#0s01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWA zpa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S> z01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW0 z3ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWA zpa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S> z01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW0 z3ZMWApa2S>01BW03ZMWApa2S>01BW03ZMWApa2S>01BW03ZTIMhXOt1BjV(+VBrFL zr-hCS7cX*Jvdq(E*>le=^LoL9>nZjQ@E3W%x;9Am&g((z-+XQDtC82@Tot%W6QL& zcUV9X`Y+#u|NZ5=|K*#D2#uoCX$-oJ4TAx{CI`Mp7>jL|tPlpA9HpM;<;qQuo?BEM3NNS^nZmPr<5}e1E(4Rgu_l{c9USWT9cCqmRd|;}erlo=z=>Tc$A> zbOv+&mMPjE_@pmp*sKuRE(wliZkId1;GJ*BURQkVbd$ZiPt2p|cBHmDEcNic_u}aM zUH^R3|7Z8S_y6gp|Kpzjbjo5Yb$!GF*flOQs7_qsZr zUrOT4zIn^Od!6tuE#Iv9Ve%STNk+=V%z_cAt*v|}iR;*>TBcQwhH2(VQ7jQXD18*# zCY0JYHRpx<6wf4)mFk2^JzZnX2ryBm@un(CUbD)X$r&nhiV$Ce=eqH3$u;7cTe`S8 z+eEb`@w>>mN%PYVXYDf+t+>0)k9CF=oD~d&WLx!wmBpMRwV_OR$wDF+kesj~YqpL) 
zao8l6F6ib3Ic!Ri3{cWGsj>6AI=;lOU6drP&F*s}cN_Eg>vZVi|{L*h9G z#-3}8=RD-sh-<`|`1~8rt$LzPpG~VC@X9vP;vZjgG2bvzdzaO%RFAfb+CX+$ar?&{ zF{f8kO7@>h6<6pC0Z}yd3Cl@dbEf67o~}37_%5Laud8VO{JjUqJ>g5W-1qWtP+AkM zpp&jCJ>q(jxJ>v`+9;}p?i*hOZ`!O)pRCYi>PdQE>G)N$f+v((f07Ikw?0ilk1wf= z*E)w}y62auqbS|2RN*ylYD7-vh;%`#tH58jbG_*U!*!K&Exn_ZpDL0x)R1w>vnZiD zO;4<5iT|Wtttl8PBYp|GiaUGlEFLBr)hrL5&Kfetk{HEgQC_UKDByvLCA=#+0f-JM zN_h`EK{GQZWpc)?6{l=v<~u5qW!xsd#MeH-^}l{ zRV}ODJrp8rzT;=2aYqMbyS`NsYQi?Z)qE<6Y#-z1{;om`H_}-h&WyNYC7&72u33MT zFu7yKvmBRx`LNVhJR%F>nAwDu!tx2RlQjFAs6$k9NJx-_uw+uj6A*WeUx#{@=OeZ6 zQJcJ-*u-g5Z+w|}5}sSy$VO|>HUaTuO!~&<8fZW2XXe5Z6O zT~E)y>#L2wm_!CQ_J}Q~1e85zBs{d2Ui=isyh+kDJwS##G#6kja$RT7@@j}{ws>%( zq4wQa;`Fbupm$G_6ot{HPqk>u*%(Xf;?z!-d$kg=x0!7%Fs75h;r$6Q}Id{edTG^2gE=cbCrd=ix<$@3$UZ_4}} zH=Zrc7S%l#J^chJW6OsqwJ5@CGQSzz3!2W{rTf$zmfu)7#M9 z#WMo3oNUXRx>I_jl}%FYD=x*umv{@AB}R+Xfs>uPrC?BJC5n<}ujEMg(Lt%5pFG$w zM`|GAb9a=+@e4+WrOZd;WNOD@ml(y}{6ez(Lso8S<+JfYDHEQok1iA9e#Y=nVX&0y zo$!^ndNvTcOMi!@#;T_m7$Ls8Y`YLeY(r$etcHC8id`$8YQL-3(>a_fk^B9|sWKvR zTYOz3)-#%T8&>sl_duXbRP!6MD%8p;6Iu44*~eNTX6m}g9t-nfl3`bn1BqyukGslo z_!BC776fVEKj3|zb3Hsq5*~>)$CJj;8Z+OTG-@h-s%UXy?m{S>MV>nIyTNG{D$mkQyH=A zE2!(a5k-}U+ga-1F69M9-yS~EW}8z(9!dt`dLpX+QrSfK>)Q%YoLBDtW+8Vx>*^h#V6X~g*BYu&a;vl{n&N3X2o_j$>_2kQPlc+FbxP9wDx-tmg|T>KdESVQsDmoFhkxz=iQ?e~i40_-hr`itbYjiHq@`j059DB(5^(J$*@e zMCqjn0p-*7y5~-^Cn7~U6(hgvOQM1xQoVPW!*s8XB@xryj`3%Dn6c`(_A-)s(3_LE zXYksqQ&H5-Qu7xO&m%70u{yUw5)tQfbq#NesK5Uj28DxgR<@A->KNex=c%zo6CTJ z%6xWvW>w(P$XMd@c*-@_JIj|qetL`L1euEab*)`nsAxjZn3`bMb1aWw#w;%Cn^~1< zRFX|rhP6UnqPuv8==U*}M$VB~ z)gm)D@nKSLVo8U}@!qA&4Xg9Mkq^W4i4u@giaGL7Pxo*xZq<)A8XE|&iC!xe40gj1 z+B9w$l3Ls$7wG3Y#>K$Gvfv*9?HWOu=`~KK) zSpHCXiU1aEB}+_)wRd%<+iaXdRj*@`U<|HcbFU2x8Ngrxq z7u1mQ`w+SnWwy+`4i(i|Zjm-;TYL$Z-ZYTZ7CNrCY4dXkZnRYqZ*QmY%}tUT$|wI; z^sNa!Js>YuK#KSCD=; z=kdGJeMxGjZK=-Bs%JEZWXBZMHjtGYx#PYUa^W%SR%OECCD-{y4@p@qX2~}l8H2F4 zPsQ{nYLY0zwWy<%qf&Z$z>ZS>4O^8nTVB7%&_G-)RU{1paJ|JXdrWpuYD+aoN>2DZ z&X+RTsX<2xH^cUDT|9i5n@|>UZUWw6=Sea)Qbx8a#wRPy5HO9~pR{+Mm{QEsvC{$} zk5`XsV-lfQVf`fU<|PYZojm5-Am%}&Q=4ShAn*r1^^I4LmPdw@HyB!4kaOWCh%3`F zv0Hjo<6t?|SVLt{UqNbc)zwl)ycDt*RIDE=YBxWDnQit+vqHA4fkk^CtVTRPMYue_ zV4QiDaMM?ooF%KCep3e7Pj5&3MM(S8hC?9zol=C2yIdt}f(}=CZiA(2YBWUNp`tm( z+Kr49k{G3SEUZ910c*~M=;p{>0^-{c3V$hMjyTvs=EbkC7}t|_c%i5gvdV#lYd*BV z0;}=qG{vwYl|*QW-I@lGA@B?z(wN1az8{E~JY^JR+*X>XeowW?anf)-znh!uOX??W zHDQ($dIC{hb0)(ghC0EY>bbd>B_D(kc$r_AM6V5smJ*(Kl4lvQ6>3dPUf49uhCS0Z zt8S}8dzB^{YuZ{N8k9OP*AI0RhrFEt^;;8r4P@7}nLZ}HZskE%rV9i}d6)T8xLwmo zhiZ{_);IkpSeW0p1X|_l@-Fxt7shAP8X~_FT3huDCR23HMYK{y^K%Xl_SA#qO0kID z0N3kZMJS{WVeazxJ95lzNyHP@@x(985R|ov+-Dt5kCUjZrq+sr zu31Rb#v0u;w`&{}ccDlW89iyL?DX+TJ)>$sIlG0#4}|zBqn|ubE$WRg+??OtSx5p$ zt&*96mbPUuhtei}kJ^w`oWSykq=znwtHjMklB{U*g!sSc)SU|bNO+iMomp!CFq8l7 zVa?hpsK&Gh72|rYvlUj+<|wVzy)gKoJw$zSE8{AM&T$#nQ~Q_;&%4(>u29Pho|;IG zpB$dJ8H#?1U6ud27$vM2%`C}@5HG0#c(Qe85;bQ?no(Tg4mZSIl(R~qlI=P}ln3)( z+7ve?CFc6(B$wW)NxhAG9Zc%Xp&td5fBO_jW$#dDC;AfCt?`el0+vJs`H@ZSV3eA3 zLiiAZoSjwgSXV89=hM<;TXH;%4%2B3BVHm&rd-!U?#(;>q@#R$3*>@^$c|E|Z>VLp z?{*X^$=dPOW`SpMK_OX}6%aE=oS0BWk(5y8P@&!P!LVQo%eF1ZOM}{d(|d>O=p5^a zl=&Nl+T_J?mO9)z&?99`R2tTIMbYA?liGX+VBSgukaN&9SLm>C(Af=Rg(Tv3X2nwz zZJeFPh;JZVNB$X>?lV1Oe0Ixws%5p^Cwb?fRIv~Dxv6pWdw>ihub-Wetf7{&N5~pK@a9yc&i{Sb7{-e=bj-cES6lB zs(|3@sEkrT3UTRvh))a8&qT>0O39u{y*Xex@wZm6Nt6;Ms=9Uf%jbBDICZB9XvII`llWUZQ6qH>TL< zS{r9JKy4Z>EZAqR^Cf8vxj`Nr2$dqs?5iCE`~{6-!@Xeh7(~}8ZF@nNb+C*O&o3<3 zOGc-Y$kbSP&`ZQ;b_1gy0Bm+wxiRZRR)|A3{676Q!Y&6jhG>(>SS`<1mKaU2LI)&6;Thl8azFzt`j82DS z4T^Kc-A6!E>-sUXnro?r6g{%-u!p0We}qjEQWeJf3P#58pcL-!0Yq&6(G!YKhpc@n 
zdb2GnyUtbHn0@JN|Z$K3rdD7}?2a;bxouFgps>`*z2oAYMU5+?gI z^ft4CeEkm8!nPjh9v`{8Ag*Z%+hPpGjY5F*Gty#LzP#vBT8mNPEmS3%%tb~POtEI-s{=-+d5)=^o<%WNT`oUCp zv|8BDd6>0O8W5*v>zQ{OxJy#9_w=b)VF8ZgRsBmcp^&?sueo5dW2C@rr$3RmK__Vl z#;+=LP3Q@o#u`l_|tweVw&Z1R8 zt028ODAul7%qA6X3CeN(RDN-1;2Cl$ZAx2E9v*DmN#08U5T`u8Fop-U2-mZOMFvIo`L=%!Qsv+)R>D3=1y-Z-_^TxGNs z7KG@64iy{vC8FbIDE~EnCw`)m^x)vR6J`LrKoP zpdp;0=^DZu9|?Aq`aoOGh8IaMB*J1t)e7kCFgM@aDB3GI7woJNcTK0pjja z9vl+6`ED5r9@3oQU$m06ACpFa$F+((KP+<78on~#he{Jx72#9p2WzBNK~NAblJ|^O z`z-yy;4%?qLwtASL*32SeG~;{d4Tg!c!=G@5CP$TTT*wHFnfIKpME_GGdJRgtIVD< z663UW`FaBcO70vfhL`m&XUSWKpz9;eoY1FcvPpb#$sNsKIw5k7jqBqBEWA-ECFVNj z#kyo1vg=RR6NO4tF#s*0aD_$$3u%i)U=~BMUwxvn;%ghi)W&-jeW&Iom}roU=tsLx zU+}p*zJCn-UZ!z zO6@rMB#OB^7K#V)^eT?xNuP?HZ>-%V;BcR1K$;x*2s(H!fu)46T^_@t=xlOi;)PPP z8(-H&KD$OQhFdc&f*zAUj8A<^X#b9qI@c<~*r8FeKw{fDz0*H0`I3a%^=idc&)W4+ zgeMz$BQ<>GOkJ@iZ*WnX=cicLjt$ze5ABTnXda|H){*jl*vR} zTqZ0JmfA*So7h^dT*{1S)>z5nqS$Ty=D=z;`E1C*jxT?tr{5S2?u8^lAbC>Bn~@Dk zZHZ^0@R0w!P$x9W7{$#nmi+cKUFDeJu@@4L3@Ef~xKK1QEO*p$qo+tur>lHIB*A*R zL{w2AC7c$cS?1!F%O-N2qEH;5mP>8DQ<(2tE9!Eg^?X-X9=;9|r!bF_TVql~N~9Sk z8#gMqlr&Hr{<*8J`yqjkRQhX#H&u($XG=4#!MhIkgUx_~f{o30`(UT$U1fA9=PKd2 z#Uu_%N$;3;PsNztrne+Ne%g;(HO|UW`@nl(3g;1bO=Pegdk&=kl zJqtXS#H`O~-teMOzkHsnxJPBWg)HZ8a7C8%Lx5AiPxYujXhXpO7) zRKOpd;K{0(cVAG^?A>)qmVozEtlLkkERZ!Ox@%TJbnU5H9csP{(ddHW8eKdD-R^>i z*(TZ6G(AHssevN)mScxD?hjwep`*?^ca?5}SLkFr124@Fg9-gE9z4paTwg~kWWmsw zdm6)XnqL5H54&>$pdYlp=V{DoG5;3)x`2eDZ&mPy7-`cBS9ri&@GsmNrwRN_Pgeb8 zH*7fR;x<|=wG-xFFWP1$x$J6{zloN2fD0j|sPpE0Z;UQ^;yEajFd-Qd4VO8+yOYSx z42z_RUBD)}`_GbIt81HT;RUreFWgpHAZ40IWbQB$oi2}69AlR6B%-Ea8UGAPi0@%@ zp`!F3HJC$Kkugc6bz8%MHMMz4BF)ep@y&?85k<2Gup|Qu_YFRV%9!b|OwBnT6R(4o z3gHU}I}Sex5;7pkvM)F@+Nf7fz+2}D4+@8aTZW+9LAY^`azZP4c_)dWpWkl^32b<_ zL$$CWw23n;^-#vc&*{R<<=-BzfsUzP6fK<_-6e;Pkk@Tw$q1Ew7B?c zUq{!SWNWX+FD9N@I|LPkb*dvL?}8&V5d&r^lprPMz-8RT04> zuv1s~?zY@_A+Eec(NyvulPH;|Mz`-U!)($<(YPAx5_>Mci&UGURQ zuxChi+=i@-?B>Dwvj8o+tnHqSg zXip-!dGQM}eL~EH97L-%(@MAyB^mt>O5hcP8=L(hC81Sl zy#}9}e7?_|&!ux=3+s`X3!y=GS*L8*Wzh#9(shjYvMxgAJd>oSGtw$NxRw&g#ko1F z!k)^+u$dCEw{#}YkF4+PC?sB4SGnadMm(QS_BRn9m|F8aUYLPS+cxkbBfnL#6eA;%*PynHCwQXWg}Cd5#D?3L_3CEt$3zh??d% z=;Wr{v^8}WDHF002EFAPG;9sxs~jsE>bz>U7J4b>i=ru$yj}ru?E2yP-Ml?KfyW#f zuc1QT*ncX!Bf}U@e1?ldQnDaj^Es5l@@v=4*z+JK>ST)bzTGIgr9Cx6e-v5b9an{hWf3LTWPlHj3!u31Ja>LP|G2s;jrB)cI~wJ&Z^tp&T-84|`?^yMbHlsD0LvZiL3IBxleT8k5WJ26Uvri&78S zyJh_36Q&+*^Mj|R9&Gia)+RkYu)ei&Kn`19np^$Q4Pg3`{oT6cJR{T`TZ=kM9ZTx@7oqIfHS%;UJR;Kn z(n+#1#Q%XRShR~-TSZ(ix4Oj_`WQcSQWE>2vLG0)f%?L2w;<%uIZRHaM?Cb5wP~<5 za~$@h-MP7G=k#>9+z+FR8!giiWnvD#x(Ir%tsJ+Kfh6;&FNtRq?R1)NxfNKeUKxTo z@`+VEWr9p3a+e|;_L=y5OVzN)LN5>Z@+pF;;8uR*Ye~*3fB2S#8T6;Vf_=t-2}q4B zGX4cai<4WaUGz{7Y_{$Nqx!Dz*0z5VKX8`+~x=C!l6A&BK*qMLm}dj(kS(&;4s*HBcIoK2<_b%?bwKz1}5jw8q zLK?p`ou+cG+ZipT27Kl_2X?*lBY$?w{%Q!#)?H`h8viJoJfivSdy$)N?9y9~{d zZhf3ClT8$AT~~!m6T~e@Hnt`fN?NZHre5S4%iIL1{#YWOZ-@O|6YpEdz=tCaO+E?Tfs`Udt3c zgnf_zI5fd3a5K>q#_bW!VRSB>u6X2ACW74pmKd4?yf2}ia53)Q)IPl*VxK7w_9|{b zImzm(hA{Qe>DY9#UL%EI#H_m?5@9QX`qn+@0AtiNdPha0m1z05lUyUeQ&CNA(rYU- zr=FQ;M=RCo5``4%qs~tUhh-!uX9IEAlB#$b6UT3;q2Ai1fRg=WG6pM;ES6t#ChfmB1|y7Zjb)!-1soZC1jUZM)b| z({o{3KY9b^bt1ii!4km<9$K91gu0zzJV+*E9jJ9q{kPUJL;5V z5gjrSmSk0qeE`hX_cX#uSeLjNRi4^n2=pS@@U&>4{GRnqpbTcIZRXU3o?j0QHkOxA zM$(`V))woIo~v{W3CXR1BonsAoSUhkZrB#})8*7OD_{mM5rzs&+TehI;n|i7*c6ha zuDX@7P78INudeH!O3`G9mfAlqT5qj5Pz~L89g(ccTnLSpj|xojh5iZi-Nvgtaj%K^ z9THW*{BYEEjH=`IK`^xE^eRR|Qu0_xZeWn>L!gmLsxhGGdZ}hV%)8>^JB7qvKj{V|Xq``I15>jf zj(>GF@LKfr4)xxg8cNaZ{rU<|C=ik%%VH0^X+GD0Go;ETftNlZ{AM_vqq}ic0BuG` 
zYL@*Oq$agprl6pmFdqVz2gCBVI;-`;C(-3EMjxTtY~@mW_2X4uUa48bQsR*o@GJ)Q z7Rbbdk~--19t!P^VnsqP*G_h#YN9Q37;+oM%>s~>P`KBpMCuS}{wYZkIC&37md-0V*s#7E z!fY`{?;7T5)4juPH)lqOm?wca)qz&%XlaMtUdo>6YrV#BFM#$~*+4qNRzU zj&3zrq*N8pr;O~fpFLB3dj9CcWF;E3%`c zcs^t9J6sDpbVS5Fr3kxQNNiU{LJC2hE>WAuDx08m+A8lXTu5Ap6D&7NT}z)$MbYK5 z-B5w9Y7?r>4XarpskQ>j4mQw-jS#e0-nVNSEs)0*bc^*91iEGQ1F~3Y;QLXugZ|Ft zv3}|Edz5WHWkMsj9QuI_-VeqIK^uXrkjZHho39sLfw499UcPaD9hGA@K~g_oQ~VDWa*46aCK6%pM;emU=Si z(4nu}3n}jY+V!wU^G&v?O1`hfxG9SGSuUOAY?b%a`Vwt{A%3G)uG5v0^u|Wl@U0~p z1suAP1iL+p?wsHLOAAm>nQjak1Fd99)_73WDmUE~C`JPJ9p|i6VrhFg=5g-m`lBBH^J(*nj%ykt}*BB&)X=?q@P{WhqVG2AeUg3c^2Au&+r3 zLyiG?swe-r%JEtGNe404`AHP*%a1)08&*ohkb-fkf+sQRcz5V99%)OKaRQv7V4P*w zx47k&lKA^ZF@Er?P7HZS zigL&NhNr70*>4Qaz_BYh<03u7A;WTA(~v|(WVu(Lq%-Cl<(uMp_0t1w-a;vv(ws?^ zdBS9p>o&N!t3qK9n(iWAS+va$(%pbDofY;!xbf#Ip^(#9*cZ(1zU1!*8V`mVeFcLf zcfg1VyEmQP$CF4vuflqb{umZ&&r7lXqp-a*5H>g!CH8=HAdYX%=y!t2m(Gr}efky5 z%!XvYN6>=UAuOa|S?Va(c!@1A&<)#Wj~78H*1Uz4v*A_UXHXkOtS#Ey?dv$pgF07| zEQc00++tnqS~Cp6%z%k1k@4N{R3yW~<)*V$Fxu8|XG?WgM&UWkk?#tMppQTKm1d!* z8P48aKU}>P+Bv=aE_odsRph1kgylo{b`G+6-G%t7m9SOQc@p+#>7)8FV-;B)a=AIoy*zxAiPo!e z3OwR_6f!&}_t~>{sE-u+IvW>PsetqyhUSs|ElpC3`(CI@T8*1@H!9sG;V7of;N6oe zlc-cDXv4#n-qR>JOnFE3^N|i4kB=Ki29kg7=r9>_t>PLJ>PxN3272~#sTIoL0VTaH z*&ZgUZBCoO6LL)d=<@k}va;y%QP?s~SX>aMRlsqFgMN>aTDS@bYvr*N?2pHD+5*pf z^el=t*=QcE4B7*2i)AMUpT?AYD&oLf^K^e)QQHlLs|UZbJlyGab1Lj_!$QrCzYE78 z)T3%sm>V#dJJkKl$!5co2kIGUF9vxH8qV??2FQ>s#)fdmVSVQ&LUjRKxJr9}xen@J z<@3i=AT9Pel$y6VY<_7N?JlaK!8xR>uzl&eP1g+D4sb#?MA?^b8f(jhZV@+rCFxctEt%Eo7^Y0dYr=lC){U@9x*9gc^mzkYdKYvnUD$Tb-=Tp*INRq|u>Ql>( z@oGp+@YR={ts-a>RgRU#DDM}2ErVvdG+^?_g`S&!g438Jm@Z)khdVJDy=i91cfHqY zBEJ}vot}m~$t>HYbe1mYIh7s7a?pV8CUwW7z=koY!!nOV?qns$+By#Fr*A^4?FK?- z;cVs|6-ylSE%dZhIgry>n>ZEf;u7ecZqk<7$`AVO(bF>`%(prcG}F-4u$5$Ye9H5A z0!OQi(H$jY(7A0Hl5LxT5?`H~)CT!8Dd1WfRy{q}0-Jt5#%gnjbP!JD^Rk_;bd0so zREtvlXW8_}<3UG>3!~ufP>5{3R{5UF)+&L)3f)tMNvAd0aoXUpGjYF|B~b)JXg;$2 z8U$u9CN$E6=>9E?27K&*q_HyP4NzMSAZPEx}jScp6{b!jsT@HuO7R3i}bCR0eire74 zR&Q@SR}t2g2xt2WnVaGss2tvHEQ6HomIHJ;$b29nqM>U7NRC)`lBo_&{rkl_sh*7K zx{i3lvsuI2z@<9XaBla!qKhZ5HCsut9QZEb~wNG)$rTwi)CkF7 z3T1#KvCgx~6Jm^h=t)$Vhlz*-tuV?4gk>!66gxPM>E+)q57SOTG@c+GPB6w+4c#e$ ze1=!k8%13uihQYUUYnpt2btI6Lb-*}Ukl5Ct$zm>&LcouK|9HauOcgN<&Vhn@^%u6 zUUQAR6eg;3{e#00OdX-4)s_WE6+}atBzL+Rb|QOt0}?MU*p=pali{xRVvQf1hE%62 zHWqD%chFO;lfwy9Voc5Mm?JFj99WJSQ=?NIj*3G`lu|cGHbtdm$3+qUIpTA(q}J1y zRPe<7(()^CjDjIQBp=*3{o3Qm`IL-&Xb_ug*>O6tJ8!V}s|YKA{eJHntJ|mQ%@0rX(A}>${fV09ZxiJ=ru80Em`~&DVDHe&Aj_XoM4M0qQZ? 
z`<78C@HPD=TagvAU`OR**`_5d{|r4P*FBkVsWk4oJ^#m;z6#xR6g6yM=Pmu|4B79= z4E(l~pOXVQg~-5o@JUcW}Hp%y5!0=$hA= z1qWXzg2n*rH5Y#Q0m8F)xkgv>&p{(Q@UzA$59jj70!n1oc9it+B(Om6!mR8?!g4rS zpe~GIZiP)Zveq2%zc_mPuqLl`UwDN84_HjJWbYD90v5)=oOx9U*Gv);2zp5XkuT@$ znb+YUQ``l-+$|v}5DF%wt$R2ze0H=<2yp_Udmn3iMk`E3p&bZxZ%Fuff%=Ii8e&95 z5>Q0u{bl~@bybDrdDdF@egD2z5qI-WDiI^g))D8D3GGe5+y^bh5aPu+W9KUGuoU_} zQ2T2kOR2@AKN8qON~IIXto69Qwcw<$$PT>I%t**)fYJz|TUboMQOv@@u=Q`C?@Ey$ z|4`qHMXBx>`FSEb?U$Bp?W?G09-0`vJ}QF{Ip(&O~`@R<6csF28t@ngZ&5z zrnuoU{|cIskdWrhyAxEtak1OfIR=#D^*2rFLJ8Ka!j;a z?4oCw4pv9ZqOZ)C4g{~a^~ zq~!qIZ{shO%JV(uFgq}y)3adtu(zz-Dl=3rwCd&Q#ur&hjX-V$^@3M6D9y~Emna^1s9%$Hy^-#SnzgrIg z_g)HFkHJzmse6R$`*%Wh_}){&kjMnP0sb$&=~Z@|xCzhsW^0e=v5HZr-ck*17e4IH z%<*)Ucg{xB>4Q5Z9Z)o$v$)glK%Ar)aixfrRtTg{c+FR`AYGMs!@poV(fUaWy=DMd zX=7p!tBm=H@2pu)S@a>_Ze!x7+1sBC_k*rZsupfsRlmWoaGg)x*E>K9u*YO(e+UBT zkzJhC_d!2c4<8zq(traQJd5daj-lfi$yj+ClRWIu2M~Ua!NY4cOF~e94NKbBcv^JG zJd#INR?HXBKgTu=MhHa@FVa+A=TfKR1hf`>J-+Xg!pP@^f0U+~=t7CzHd%%C%r11N z%X3g;{1pA`^G{8xD+;z6a4L8#Q>&d^Nc^`E;Se-bY^iF4 z4X;qfMm&^!Y)E@Hr1Op~+a=jFSHqBAebkXnWR{Sa>bKQ6R=G?eHx@KI{W771LxSW( zE+1>jO(-p1{L$ujv?H^66O(6POcCZ4mrR0#L8?d>(~TH+f;mfryCtOhBx+bgZJoXy zynr94rm`#I3l4tpM*anjt_GXisi}v>D^OLW%TBrfceo)o{nn@$OE{!#LXl+*{M55` zCTwvpCtNW+Z1VpvUhR?m!02bG)>n`SgUfrQS9!TOT~}9K=?SHK_LCSs+hr7g&=U2p z$t#jzf5PlU)MwWAn5pkW%%6jNX_hXrDxWDuHI74 zzsM?oCeEgfJ%@&oPH!U=y`|wW!1bFLe-$Z0>+ivEt7vZPF_JSyYW|vgA_~YzX{MZ2 z8)(F$|D|#mF(M8Q&VuYJ z*C>gZo3p#qur#@+S%A_nh#&o_jhxI>PPCl2sC=p+IcDR4NdLM$Dn+UQ8}+1Qe3V2B zT@!WCkY}Ycj{tzQ<_xc}d`nRzv4LsIE4ucMK_5+{-16gCD{J+8Dq*YKVeZB&8*!(I z*8qeEt;fV_I>vAy&zb9tbipyk0*Ri_df9 z_#@~H*5=8YW4ta!frF|nB`co(06n01#d*60tj9Zbzfo5{QCFdq{4Cl6(x)_wWd<9= zey3^jLWvmPk;5F*l__xe%(SfK`X=2Q`e=H@isguk?5VwV27N)_87u6)L2%Z}&C**y`JCNsS9n5$xEQAbXRk2>wiuaY0#j5 zE!_Fp@tT@JsN5K7ojF#DMND(NbMgNqh0K?na8CBoL+KKXkpn`42?JvCLPTeDzvXL~-*0mht4<=Kf_fwse@BjwTwXd>&6Gjim za*6vLPRlf%hr(QxP%6K?I}bHHWKA7Ivr|nt^p9j6dY$wC^$hRLUEq}1=0Qu)yzs0uYQGGC*5a(3pr*A%!Sg?9bWGMZQ7GK@c$KbDcl^5eBv zLmWJaIs=}*9_XYu`A|Zx_=AQFHI6yrq`I#~M{vA64^Mwf^~q}!MN@c5@e zvIQELqz+(bi>?D0ZONi$^k;+vm<4a6*RFYOD?JE|cP<-eg`cKjsImNMF^S}6#dGS( z_RE_+;2qj8qO%EcPK|dAssE{;`l>SQ{YJ)q5}sQu!6OfhQNliH^pr~(zjb@1Ze0f{dhMjx;O=PJPvM*a#|fQ5K+z@ z-6`||KGuwu+YRV4w}Zx!r??C!(Zt+3{4bIGTUhlv9-|oYuH8MOe3W0A^Ba?z0m-1y z`Lx(QDV&G9mu|#ijS?f)8m}^XU1))cHJ+30tEIcYQh_+g;MBGrg+!IOz3z0;P*(V1 zLc*K|)wT?W!z(SO)l{v=1Y#hC&nxSCGR7*PQaGMBmDS)MTMOYZUraaifnXZm7+bdj zv8PSV)mE8bz{~Bo1`(&c02K2cRgnn`2UZUluKn#&xs+yF8n9zqaW+^*rJ^wuj3T!8 z{W{_Cx?;jNs=_}R@Arb3Vh^~6SG&lP24z%LbkW|nZ$l1s?7kNFZQU*!th$6>N+2)N zF>m3&~LbG3Nl72h@a zI)2FZ)zabx-HD_xNEj6nexZcoV3p-pA1%7ya5fC%0rf&Z#cAsJ0rb^r-lyC0oSdy# zTbG-YpYPvUT2_W}CC2{qGHIWSAz01YsScfwE4JUlUbs-gcm_#poHTXS9MHqoKude zl(MIcyGwX=v#ar@a>&qwJI2m71_M2$CHa*IUgA*gzBsQ~@X7n;RytSlu#zz)o`B$o z2%Uxx&$3q#q7du|d6T2A0d4J(oG>lp!-I?e_S80?oWNk`_7z*hfI9I!;uzXZg+yzdP}&!6%Ro-&M8J83tBW#JyCS;EX3<3eA^B$vf7ZK{oi5cV-YYv9ItO zkqb!Q-V~w97cEiS8h$fn(I4S55p?(!ggS{*2g($u59 zq9=VueG8w|6WtP|1?BGaU@$lkMbFp#C3(RImr?uf#ABEV4NuPx7TIxR+9psc>vO1tk@OkQ-Cdio24?;@}!91~> zSR~rt&f+l-cAf;qg8m)flKEe(PkIp}Ld2`A(+|SiL2cRVW356HJ>oiHey^1U3Wacm z0b}%$&K01$=)!?j)0X$bMBqJ&>C#(kXsq9gZF?9(JXI*-7_m6tU+$p|@-tdsRNbwd ztWV5`rkj32G0G=chz`^k3@`;ncP{VT)0ec3C>+k~AC)A8I2(?UAnxgjIzYgf*!IxP zeoScAU;R4|MC=zyd2K=TFel)}FWD>xxLlgeeq9wPSkOTVc1NR`${BF1`Pxiv!^Bym zX~lAw?9)?lc6^+K$FfCw&}XZ-0dKe=)QFuD(%)QC6@O5h+sJCnp13mgqZ#&;`3(%XQ-xB|yt4oaxGzE&W6w%taHUB1%@{!kB z@gX0H=ANiG|Kxy7yL6qgj^;`iuyHkHtKNLYPwlrS2FMKk>)>o*Qo5`(wtvx(P4+Oe z+$nIDkT2(G)2+H{tVvD>f+Grz?a{QP-IvUw1z>?BZ75FWF%_Qq+X*0@A|X*uv>Lmc 
zWW*V@Wl!`FldF0WO@tQMMrOKU-R>WwK2F~^gW|KfcV6H+;F@Hc$iIHg6fU^Z`n`!$ z#IzP|3ek(($iMAEc@Zw&))in^F~3r+ny(!9VZT2}T?*?xtS>wFGkTaKo7O)ska19$ z71L*Ny5Pk*-9Fqf>D-^y??p=V0&Us?#5c8!l;SrxFyRj=2g9?V>r_0C!DOa5@BmW^ zq%N;SW}k`^xi4pzI_Y4XH8T=LTSf@P3L&S`g$qz4-EJ@qz$ivmfqvno*++M#E#k1! zXlMKy@&z!BZFHpPQ=cIFq{;B;i%KUQ1ILT5e!5hyB8eCDft$(BfEdp+k%^S4yk<36 z>7YnmZaK!W8LfQkiF%87NSwQ*nejoj+)F~M2rc>)L%EQqp9S_EzT2LfYN2tcyS^w;xC$TT!<*?JQaQ#ZI)UI50H4}Z84!Z^~|q* zc1^m5P#IMgT&3u5}&pei? zjes|UE)s2A4Mo}ZtV^Ret`hB9?$Rslp900~T1&DU(5}FGIJ+{E16j^&e0c4;jZl%s z7_o0ldH7vqy*uhnHVO9MYOtx6r$9*swl23o%)`%2UPy`SNB9s;W$%mNm!RnlKeGb| z4%pH*M0^ki+@I|!fVqG|kAT359zIU)KWag9zRJPsS_HP`Gu2%Zt`|ElIw1y9L7pn2v6Qw|V<7IcUr4W@+ z^+!yRx1iK~MyKD`y<0VhaF3AY1n;_w%(apCw0G+BoRdK08Kso~a9=tAT7FujTOKw? zEz8Krsx`i1u|*chU{dh~pAEy%LJ2KbuCebbW&oT%a(hfijbgBeI{SH7+b_R<+8K%w z;Y;Xq)Imcv2x@3KgAq?Tu>HFbOll!&;d-iB_YxvoW>U3lROnF8vT+isx1~%@cHb=a z*imHvPD)?In4=1Ds76ONES;|u`TRWpv@bm4Z3*pr?;H#!(2t8ES!Jg_ttUAn539_- z2;zgP#(1-Sap30ybU(~IhqH@T|1#bWStyZ5}}?lzLqC~izZ0fpC<n`d`Nv2%)KPAir$gY#-t6BiRV$|zwee~4dfWMKCLPKKt^>Ox6wZr|MBA=+CGNw zu0ut{Av|gE(`q1q!83G`hjoL7M_cWf%b98hHZtvC$Nk)BCgZvqHBmCn;p>?O8`bD;1%NKM${Ts7x zHR%kYq*;gV>v&kiPx#G>rzLUnS}<>~=M%E4d_zijW=WdxsS?I5(M}u=QJig=z2NN> zayFC$^9i{}4bJQFq{kZBaG4TfY$n3hc-olEM8CL}GrJA4kg{U>9W=Y`ue@EqNPUVv zpBWMEmgBgJDtx~YFVb>_)eh?U$yqd&LDds;g+}<&7|GlJGU{I>Q2e#8DXW}{4vU*4D9*bSQzgrFP#KIq-EgP&!>lY<51+^O?$6%bN=&}#2> z^_%GHOUZW=Pv7);OqNpQnZ;x*Gi#@g2I7gsOQNU4ycmeLwJq3A#gl?HMM~v5IwxIe zw4J@NSN-nUZ$@A*f;P19iK_|fReft8Gozl+c*a@3vurp|{PT{6-qz0!k`PAL>%r*P z|8MH?0>~tcWmS*PHkEVW{XFO0}rSp_%jR#ZfdLs_7kzeFmqs)`N}mrcd4-Z}37N5%Y^mN5`1n3>x=W zbg6hUB>^hi-C>ePYK09=Z~NE5@wEWPb&R_cI$fh2MMt4#DAp#*n0eW>JHqu6m7x|R zomFZ5sHDo>^x8%e%Dcv>>&xg2!Vd>ofb_88>1_2{M={R_1q}sRxee*ER4CJTmC568 zY!R_NCfaGD-g|v-u|)SrZk3-pw1loh;bf?aA`n60Fy8!gV|9oFOY^8*dtl^v3%+;p zSgihPujHjXo|p{kV5`!26@(uIqlzg*`GuFiL6+6Sr&X-#a*Xdq-Raqb)nB7FGlHV% z-mc-rX0#k~f+p2FBm$Z5wO`}D@)hOlhZ9eC9GX_suR~E^_}HHbA$$TA8AM)BGwP;_BPey8>L@sov%bXu)04WypnC?_%OUnqLEUH;q7r>X*yult&(dKk`hj$>1 z0OlSDp)jbk2L20^Qx6T(;&k0XLbl!o7z{_nw5eCUX<^RW`qOAJkFKG87>IZa zl{GIbem8ZA!m(gjT}(*@K$7>*mJf;{a7Ke|urY6e@meoQOFU;Hq2p!J;(Nv~$MD5M znzT?RHi227cYKwQs8-(|6J*H)H7n1DfIpoiaJJ=NC59rl(ByHTf(NP?=gdY3 zmyu*4l2+RYjX$-PYoHGgKn_7k|B@WZjgVt2`o_(x zj95(E-?!QoCjX?0#H&6C+w#zB?vd0$o^?K6fv^*iOAc&Qp`R*Zk*$k^yHtf8XmJHl z`2It_8<cd+h*|E=~X@Szeq$v9_ z$@4G!!{QPU*12MTiStkjbF|?Fs+l6aUxx{Ec$6|MVZfTpt&>T-kJLo;@S8<>o=;53 zf1$5h%Ps{kY7_%TP>~GhWuflWdz0}5)7#tVc?BRhlEU=UYVLxYvgE%uZS6Qx2+nPA z_mPcnP@k#umT4`Hc246{(CW^uO0R zIqiLaX(Ox`#dDz$QE?3aWa+)okZ2dx$s(nhr9q~nc*|+7#$ahx zr~lD%iH!yzX)1I`{DJyCW0+b#-DsC&lSFOPfiLWJLX7+omJ20SPvt;F82|gnhOeqj z9FBGvO?J|!Yikql27?D+k&hgO7foB_A7dI9S|+b?151O~fD2#G={G0d^CdR0ULFW} zyP19mSp%=o@H;gPpWtC&@*3SKn0yI3qkLffpucuj1;@0ZWg~uTKcG*&Va^uYkbxmt zOVK5{UPuf5Lb4eXVxEW4@6f{!(+2ztQFE?QF8$}~_v(dFBo>&=J~i`{Do=wjAb$xD zq`=!Vg#C<+asv~bmkB+h!*yS`eq$5K&Em7Oi~F6IeEEWWdwq8Bd~kR=*(g>C zj%E{;X40KfIk$v%%HV98XyjF4&rV*8kT#&J$$3>YtyT;J8aNsD{#pEBUh9`IWaXQ$ zAC>_XF%1R4gAM+$p5}YNVNK5+HaSQH$5r^aY+6oywdFzf58Vs4h+b z7V&dkh3VuNhFABD^b&Q8_X;Wi{ZzXzWq|WyGaSbF(6v%mV60Ev%W0XeQvsYke$r$k z)y|~DusE>R!hdW8NcnE8#R-3u7EhI?b4(ny8_+*0EZ#pOfJvU&gC-Sn&SDpYMm3-9 zqR^Z2k{1j)Aen`0=wDvZ^;Mm0pIJt!wWu=7#DMBMje-N65N(o&1gmbC96gZQYUufAbqOMH@V)$j7nP60h=3Dh1f@B00mA%+AsoU=p-}( z>Ugu1!gEUOm_GZh%3$*=-L8A2^F_aU6Tj>nOZs#h$$!*nRBEh{gikfgzc$~&(q>TI zT{}c9a`Pwq?1axkVX@0^!Gz4$$dyQRCA3G4oI*?`af*pn4zI@)SXb#`b*|r>B466| zG4j{(@ze1}3Nbjw3#~R1m*t;(RP`Ya_4Gu1F=n5?mkVfkS83|7d%7j@ z9ISATQ{H(#W~@z_$nKj^&(gaHXVa0r#k5=MF83@9(X@nbG-J^q6X^3v+Y*2h(xUYJ zsvI9JK4IQv!7wrs%zgbGzIjI#N3fKh^B_y#{Jg&H(e=vm8khJWwliY)nmgI73afb2g 
zr(U;jO_V!w40`cfG+{>Z3nuI27|8hH+LW5Vv}Fo#y1moycp2nM9)1cGG)Qqe_a=2kub{VS zj-9e8Fq+-hn+iMfqoH51UfWs4VL}H)TWl`2?u~T=)%GqNFn&1%B}@<#51>#MY*<2( zFVt8eg`-8Sc)RX0uR~0xLS5l!R%g?i8jc0oucOv3U5_+`clrCUY3yceevGm%NCN-B ztE9usDd20}y_9yU95O=aWQ6v}`;Fxb08i-`#M$VyQQAsRZ4=lqN*!&2_fofw{I7dG z_;YV$7^HVa++}%$Bm6wNPXEc%`QHPS6IpOvxhF9|UL1@5M;vETdO{)CEw5`PzDQb$ zw;g}Om}cSN{F$;(gCW$n#ajH- z??u$bf`{W?swnKi;a@HSb~IIC0?Y-lbS^utZ%M`|*juMqK01khC8tsD>7=7ro2-}l z=|nVE+4WRx=UpY|o-rphBbaeUq$Md3Zv3+Ge2*pb*2L!`PZ-we`YSUCSmMY&)ehAM z$5vLe49sJQte6aQ@IqjxIcYKLpm=9fQzy|PHZF5_6TrpqJj8wXSXj*ihtW6 z4SJ&9KD#X+8Lc=fxNGCH0o9RAkr`1h|yuyc~wr1#5&-Up+pn9QGx^?)X~vH0yBY+tZLUG^|`o z9k%;pO>wC%I-BjYO`*%b;m|WdAKDEH&rU>|!6J9%$b|sX{F6054P)T>E^c+GY||c= z!LRzmmR^@{Khd>QOY)DhSfsCqek=aOTqF2dN|+Sm8r_*0P5a#pz58-l zY_I4w_EGQl-hSsqqkTq*BHA>RLL1%vv(o*<|M!1Y7*Br{6U=_*`x1GI0aH0?vRpLL zIde!ZrU#U!qC@#ce!Wk+cjh5?S87!Cj7&Kv&sD0GNU*Wo8;EeWiB*8fEs<Rt`pZB>?XlvRr8oAg1BID0ZmO z`AODq_{FA%eLe*4LYkmm;U z<>%CegVZnXVlQtAe~GCY$H?-6TsX9!aMx@M8rZz^k6O-~0u~M91IK+mJ6*q?)6D!k z`5j~hl}}oi?`G5VrFh7?r9~TPgG{C?RR#QX7s?#{=lxzw60oO`6Ru>OnbG|mTWr(h z1Be+8?h?!slmE)-t`s!nHtmG2Otn=Oj38}~3#^c$5lSbV23Fux4`Cti$h9UL-!a1?pmDuqGdB<29Lt&57C*!-Dz^?@rTR2+$IZvGH z`2be9K*d;$FwuhL^mp< zBY93=LUoatr*ZOS^9$l*ezR`5l6?W>PXlna=B8Ob$r-DC4rj!i(Q_1F(+l?jY=`iC zh3siuF_K$3MHtT%4X0FD4+j=xjLmg2mr;>g$)Q`7kb^|oS)HY^26$rW^}>N62qk4xdu4}6Hr0nYGe0ITmt55ZYOr`yYCOO{gtr8Tke2Ch0` z$Z}xDM_@b$;6dfc>Ppubi#{mrfg0HGeDXKV-Bn4ppH}~# zu3XeP=U7JnV^|qF7{SreUx6tMiztjY75Y0dOne-9rgx=VIm06D%#3Y^u4NsP(FPV? z=k8LXBmFPNIsc8TpRSI0=d%4)M)K6+M~%zij@613fkD}jEJMWdy*rK2PlP!W&vgpv z-FyJ#R>F8&@+-~~U}Rc9f*0rp&=P55knZW0Y~aW9e^rpvECM9Q2&9g&{#m&F56GB> zT&~5Eyl*Qq`d^50amIDNSl6;U(6o z-|Lyv!r&$t3mPRmg`Vzzv*i3EnZUyOu19jhk`Jxd-MI#5suH?Gu9fdj9#VBI0c?E0 zDtBIiW>tF8hYmu}|7yaMcn^?f@Vx=EVe1&oO&qk~BT@DXVzg;qGYIw}hN?cH0Peyp zHYNRQuBuRIB5xOw1`F){h0`U^3&kk#TjX(#$qND88&GppXc-0&&Kyx;0t(X(gcjsv zI+1<@Hy(iCDX(d%A%&;Sn-^QMh$wN$JAd!qv_VEYWvkK|)ccuAV?NoKR9Y z(b|DZnV=XABSjI;Yh?G6{ShO0t<~d5V%#JewjMc_3^6cEpTvt6MP6~Dkr_7QlsY9A z;%w%xO*8^-qMO`?wcJpPbU(CQ`Ri*?X40sr-nF>>CX4BGl}YRuB`x zi*1KGkd#VJ-5=e-#T3@L+2l0f@k00#yIdRJ3mS#=1GS|?yD~JF)NE{3sAtl$6H5InDinocEEiAVxTkTscnf%kN;{T_blA%?5af^ z;F5x(wme2vKeq9uQs%#a%ImRTj#)rv9?h~>Bpu``d?&vtl6SSx-vM+~*YY~oD1H~G z@}uZE{SClOBfgQFje^K)mjfw-pp%B%Ov3C@9Y+vM^SkDsq@wtu2!rYw7_LM!? 
zJas()U_@8vn+nT$0P?R**u=e5E0Oc|o`8+6=j9PRpKIx8RyHW&`7-09_V5F#) z*RmCwJ9oIzwBPD`!!vU0Xv1Oa(KPdI%>!HQRZ>3xlhuU~cYuQ-pO}mnbkO7bt$6Iy zbu~+nC`iv(HIy04gy~#I^|3okUD>lsYTc9L#sHt9JSJl>UGkcu9$@`asp}w zcet{C6}T2$=_<}I z_f)Q$0`=RCQ@&(--e&F^<{b`wWIYDLT-dt88{)~yCpwJ*+RU8Q&*GI>wzXHw2Lh2Z z?OZlrjKYmFb*XGSOAWks6QtnXU&X|ov_Wj}i(Gm=IrO7562ZXg?TKG9n&Ts-H zdB^ZZe`EX(tp~(b=6)ZOFO5ONR;U4K(x(}*bn&i2F_apMY7wxp`qF%T9|jwHhZ9sH z!DTQQjRMBxQFz=qQO*!ZfYNg&&qH<)(pcHKFb45-mcP9*dILJOIHkpv%a*m88Cai``8YM{3T9Lf zD;&KDeC_4D`>mvqZArf0;uB$A^tazXD;#5D+5f6(pv%eK6Akgu_^{9MVC5K#g$w#q zP$$=3HQt;em+ux%UZcOHkQSgAnVTpACMY=}dR{yq2V2#c(AQ zwuJTF71oa*MpN9~Xhka7Nl};QX$n{(S|pgdC3Pb3IEfNTpGxgUU`r&Y1+P0JG+Clz z{vIvNT21tP8awQHGz|rEAvMte&63yja!&vW>yPr7r<}ZVc$8&nPIp?v-`+rTC_)sb z9eUa05D&dfPV9@RHRG8VwRBwe$9+XsSS7s?&U#AvJ2b38UUO8D@dl_*|0`vQt=eAr z&s)@SQeZW~;ILsVG*3-xMnwIT#}tSjqpfr#3nhQ6M=V>;cg}4_TmZ@A_PXnZ)~IlxISm;d zj}Pds!1J};BTD*$`v5TT+G{BwBRbzqj$Fd>D?_RRV zQQe6rt#yHz2sBM2>m}6dQ4)shDAKbTlM7(pnfXQwi=$R)K!wh7n2+UAbfX+LiAia6 zVg)Y z7n8($u0+=(^*>=q{69ZMH8q&P(i~%yO2yE*(J#|mkTqge&KFw_UQmak=4Fc+#1dw1tDOe-BOncr8L0TEz%WOMeUWazmwPj_JjAMuF+{L|=>0fz<{ zDMcl=!n#zmnFD3tMisU)sObtP-L?uQMUPE?j(djtTa41&H_Rb404)70x6Bt-VX$}6 zxqm0IGU~{cKJJ@(B8uOVW_^vS{cl#}B+1^sIJdeEbsQmM^1P%u~p-RQ!v*0wHcfN zUAKH-JrV~aOh~J9*|Z^X4m6};tDQqqBusVnV~D4HkvPS>0R3-O;us*e~GdiYQY)=%b_cW|6|~bm3EGJlWd+zJ3L5&AX2quacWz3OgeWdB3C)IYnr#lkvJ3a+lB7ote8vV_wn{%Mw&~`Q)j!Rfm;JOmZ9j`&?>$Z4)Qu0p_Zd3wjN!y z?rS3{BM#@&lG0#1vIjitF4ec0Y1TX&DXpCs@h-H6U_WC{eZP6uDuaVY=dgc<61>DU zQ@8cO^CBMx6c&;F8y%Vo1J*Elm3Y`4YsGKIUw-C`^KkNvhHT_3enz4X8B3+gv;lt0 zYZT-KLqYi?opK)aiz!*EXGS#!Su>$W%nT5I^0)O_ZWi|-iTA4x$$gk=-M4;*ghl=9 z|COzp3bndTMDD)hI?yjEw<6)9Af!kd-5)j#-iPJ2Udc+|x1jfdi|JzHm(V`X=w)w9 z1kVI_(3x98MD9_%I3*Y{p&J-@1foP~*XdE^(*3*2GOH3jgpt}GZkJ1*hojJ%>GD+XOeTPs{u0hgIVP-ZPa0%y^>f~Q+{}wR|bbx)j+&5d%TQT#PLwYbw z2@`g>T~A+&p+_-6I#5yBK;39aO}irRg-I^r=qiE>Zt!|M;L1$_xMYm&!~nBXQC}Ld z;xe|9$Pp0D;g7~-5M^mHQb&8sJz*N+i~ z@cowSD7sZhBETe+d76H{3%8aLddIO06wx1eeV0r%cqqXuT~2yOzl~Hmk!B-RHlT?a zs-(K8=2C*LmHch(g!%gKae{QS`o6=En4LBj2j3MJXG4dRRVKzlkI+u&jqPZ(E9U42 zY|9{<>ET%!<1-h60cl+K(^_usC)DzK^w4im7{36<+{I16i9XX@)%85VM$g z9LbWLx1R0iBn|m+e@gqLfXV;B9;EwnjES<}eZTeS3o#ER(l|%+n7>Ve_Bw^Jf4A?N zm0c=a4z+_GwH5g?^rRngV4xQh4T;QOxtQ+Q5K?-AUV@+s1csL7WLd!I|-1p`qM&7ZWX5RQm?J z80_h`&^wy8-eMky_?K@Lw&}(vIyYu z^g1e84(TtNC|5(*uCmgDgnFD~kvwT!8t~m)_i&WG{WSECL~o`?dQ45Q!DC#08@&RM zY;Tn%Ib9kH<@d8<)I*M5D_u@r$7PRGx@twLJRG7arFL_W4DDD;W>9iLG*p}AKYW9vl_)=`twjSw|HtBFQ< z0qdoppSsIex0QmNMs~^($)1_G3KUtwe)inlEe<%6s(B{OeCjdXM zx-@Ah7u{DR+p<8FGX%5aGuL55r5ij0o=z8#mxk}5h|nEXF{d@gK&VrFg^teZ+zPuR zcWHKQ+?a(*w%ACqIE=l<8yuToXp{Q@VwATmP2-wP=!`@k8Quo)zZF2k8dtpDgY?k$ zm@<4)k4t`BgUPN9ZZLuVbeq6~S?*MN#vJws5Jvtav@gx zyxiJWOmTaG(z{4)0-@{PmrC1jo8`b_&XlH5W(PDTxi4tVsGV*9!PSoxc~Z^no0 zYTV)!?CnaNB0MVWQKPb@3Hb2x0fYXhMH%(Ui-4UXP7xLhLy;ad=HoLacb zf;7~28P7v{f1nwSSHt%qPsz}5EW!RhkQV!8`eXw}E=%4fMiA!ejDoo*?dnm@7b~FM zTC8q)%3KjftK24?1K6cTcY>wFIA38#0l)Qw@igBsDd#+b=Hd4v?j}fo31~IFUtbbA*7$* z*#g?C!0Ad!8bYSp75Z)6i~S%hG&;pFcQgXxgiWy)B-Ibt(Qn#?R(pM(3U}ZzA6g3( zsVUQ<0VnACU84RcEthaKzf=|0i;>e5@HDL-F7b|ssF~P$jshB5sS|E%U-U5L%Vkm5 z2qClgMMoEQlWNt3rm^NUz|MK4{P+EH6?nSavqlm2aFjNQC$^drlNJ?VfyJV%c9C~u zg2SWO*gd6U@7nlP=M#V6TE_NRTvAmR3$`YsVWTe$D59V4d47jI?T!)tarTOExNEI3 z3$9qFXssqD4(Dp#yXkeQZcXxlRD|h4WRaCBw6|MzMGq+bwqJd({ zN>PQ&-doGnZ3Q&5Y||G%ODQEVV%=)r3}Z8q?~R=aGj0BlGLetqV~z}ErRSe%uJl+B zlVn5D*MNrUecKgNn)_-bHuLKAWxkW9UpR+M#@BMZB?Q=LEs#}R8W|XkSZtC}Q9MtN zqXKfSsSA(`4l7he0wuGB2o))mMX*%1njcJD&`g$lTk>v{%c(CBCY1%cY3-=VvmH zQ5VqMNOAJw&V^tAQ<$SJc_O97LaQ&{7IAGDHS;R`p_lv@j*Uo;W|I{0i<-d*H#$jU 
z&8bZSxcjnd(GJ>u8Vi^FA=*)=H0#*D0ertZm&1Vg(byo1sF5}YH3!f%URP0(;f_ZOtz6--Xo zCFsF|IP@{WR%~{|noar_Xnq=!(`ojIIRLs_oCdFHS&PMw>^?Z)kcw+Ttd38BEb*>K zgP#hXGPWmVqusGw<058ao`zc;e!_5F<)z&ShJMa7&WRNeoz~Sv&LB!EXJ{k7~{q@Nr3a48evnb!?2^rcMkGX;mthFY9 zW7mTVebXxq!bQ{=vthQ4{0j1yBth%|LaY2K{kg86BVy2ZuUt~>ZT%1ED6D>GU)6xg z7u?pZV78!X8iid8Q^+b2sHK-wA)F7@p$)1$Eqd?_-5T?8G8b`+o2p#X+SyeW(ITmU zO#!L75!%vGJGE6~2*u$ZF+R!b!94qw3TwhOqk%*HZd=|_j3imgL8@ui$IAY3tdgTT zVPc#%s9+~O#VE;C>V2ZR6P&r~T;7cj)huJxQ|YaCdN=OI_N$Z_uXIXGtm&dqq~xIX>%yH%*cD#Y`+``Rcay*k;nI*qgPC@@# z?xbUJZN=%8mdb}v`Nl-U-w>f90Y93c06Z=(l_3D=J}wXOJ9@&>8&9&1!*0c^ z(+oWP<~gqA!NK+`P6!ij96o<7cQw+4N%wmvhbKr8;M2QX>S~IG{&oy-JHjLoZa&Mp z2vig*V10Wpy%7o~7l>VySp8-MM3VAI80@IoLbUVDj}oue)Ij&7S+iB+zYue|e1QmJ zWs5wcFtU&MyBTdjddk(UqjGFpi@UAGW^>ayep&dY6iC zpC?^bDO3zYB(g~kM0O6TrHGnv-`Ak?&An!9_S*%hKYCBP1JQAva`9c#?1qFfu`{s9+gNvZe>(l_c4d zIfx52(PxaR+~olZ;?X!oTNXa&CG)K?#Cr_xM(F)<;BxITHD#x8FW@k%!h=CnjO-JQ z*5hk#p)jq0dWCi;zLP^A#uLId6*$<-#BI#OxI|J72cIv)BD}h{Yq%d_yuo9XLdFNz zOp|9>rdzr*TZyUMLQS_U$CGC-%R~!lHq6Grq8pCDDfog)mL=kebNenKu7YlCzvz^n zLL$2msYXh5?Nw4EcND)pNa&*H0{O>?O|hPuK70KTW?Ce4;ZsdY#>cWE@2i=~_jO)B zb%+x3s4^8kk8|d_ismSMABMZKJqNdcgH58W7c1~~Paq^^2z@*4xiqGBX^x32{DLLU zmz_{p_I5@1DSZBp`+aK@CsEym+@x)R$r2_S-YBIz)1?ERsjqa&hN&iyIM9dO)4p`9WvlEB{a8*59%)Hd^3W;JYVF+}-|Now z;2S~}+cN2=D|Fv!BfnfS;4&d&nkj(&T(y+S`~V$JZdQ1vuoIhCWG_v!r%e(Mpp6J^ z$}>)#3=alm={v6>;mFkbdsodSdkgNlw+8&hZCGpi2gqZ z^=sR{VKMiO$>2LS*GjvGerlt0~CbTx^6?CxTt@@i+%^aW2by;5~-M_;tRmk3Hx_-09 zB}93x0N#oH53m%bHV;}mnr0|rrSln3fcH2r#f+-+`YH^GLHCw*Z z1zs}MGZZ^uUT=fb-@>{Ps$-yUq9j~R1>7pr?^A!&TKA!C ztIjPe1N~)$#rjqyHm|mK_z_^0-BQIM_phi?F9O2)A-bfl?Mk^z8$-ulxU;pO&PRGW zwSzlPp(5J*IB*$K$LkNBF%xGmK5#;txWOmUHFF_}gKx)z845w~KDSDGE|ueREIy_B z`0?as`>XX!VxU5Sr?v%-WjtSww`ZMT$s;RibA_`OrkX_lRG1PSH<4j00p^8MnzGn<)x9h=DC+7{orUA2l2oOK*Z3z__ zhNe2eQN&)KkJQ}7?TW>?@O=)XMP9B4soqUrS2+s1uXOEk7z`mk*wU;uR-)@@zm#g> z0R=2o8ET>>99}f+*b^rgTA)Wy>ORImygxvUz?ia3I1A8w`ON$cKx-gY%kV~C=_0>6 z@9DyzxJC6sFd3-h*dcv)5`*)8V<;v92uM?eP;EYwN(FLa=s*Xh=KYX#SbBP=I;1?z z`cMvk3Wu-4@#piw7e!PR^gk93I-^O**qX3>jk3Dp4;VwT8X|ClDIO#@*i% zy(pga-W7$pMk645+yDWhF`QU6e+%fZPqCp$$&AI$#--tGdc+)NE!9i1+U0-o&EHT& zWYFU0r&$x%(cGS-v3*py)SZAv7f8s)4w>HzrIEVq zU-*?67OA1QWJv|<|0n6~!F7Ty9L9ZLxE5ef#<))_VpAEBeNF~k8x-(72MZG~1U)JkA>B^Y?cDX6JQ4G|HM z0E)o=&S!uB%+YBpC3)`Wy07c}x{lKtAOA7{xK(+oSl}#Mxa7()tqOqnMc%vm;bjQB z-M-;za9&QLmNN^QO@`Xm(_e@^c*VQ-ZHuYKZ9Lh1L6`$#tY9&;{knw#cHWX9yzt6| zjDjxN;AEKg;+@;l?LR6q_CW_4q8a9P@?!yoz>EUJ9`v)pvm;5DH>rp5WCD+c%qvIf zJDE!VmwrnI#NfXkr9OY43!IsV??jyGalpD3Nm9M3xp@ex85aOI-$?8x^yT}{Hw1{89j^3sPvt%f=P+F0lvjl&WHek>P3O z?G|9xd^*Unx-IvUdLYaqpVyn#n1ku%Uyb)Vumkkl9&(gFO+a*qA&^sB?(5<_0Xmbl zcbrJv9$a(z_0$d%rl)sW>!lXl=lPN4vuQ}9C$1}@M^|9osE7VJ^eFn%eR){k%ZJdT zEm-0N&^4(m4nkKP@JKT$^NNhuF?@#UI`AZWZiGKBV#qmer>jA=hj8c=PQFMQwN zVxMO+8gfGL>}TM6gfkn!l}~6q)#=47jC@Ny`~m*_Y4AL|N~^P3&*QNj<<}cIJ{w}T z{4qO>SKw8t7*WLY+){JSjPV(G?&4+J!>3nHCQt6{Phvk>nA=H4-Y^p z#z;OWHL_LXL^GCrdMB=@*x3{SdI=@53QwdN*c-`ErNwu&XzAp{qukJHCYoe!Xio&q z-!sB9HD=n5-M(-$+yi-00{kiX2oaXktl-Ag$?8gm9KBmX#MaU2LCu$ot@VV_S!S~)lSJQ=5mgWGN{`WCLPv@ zGg2z@j>3dv6DFhMln6l|7Ky2k@di;dV{I%AMAsNaGY-c7)oF(AB*30AV!RsiP#5^9 zGy`&#&j4KzRXMA;L{#68t2NtN;1|q&XlkYfU~#gH&=Ai>tgm2%t>tYbg@!sDf|d*{ zwnmo2k>@~vOJlSZ)}~lPi4=aNKQ?5_xw&Y~_A#bjjsjT6bBo|Ti`uU9HyHbYn&%qe zni#XvlY)4pIt#(Q=!*Yl-aMDuJIpIsK#L|VjF3f5L%K#zlBKTc(rz@xku7PFD-Vr~ zVVf+9B!5cr{|$n;Qns@=5J4>OA;zqEPC-HtW1!p@y{b5Cbk|n`K(y$|Xtl)#vEO&m zWXNMcVnt}-#AneObJ&S_2#mt^z058pYa0m_dBFVh7lg+;4Qr_utG*e~uxJ-vIPzK) zM<86LE=D3M@#n$E4MZL%bvzABNjeSQQ~Us2v7c6BiLQ24P#5D#L+mh5qvDfnme#G2 
zZyXL=Q_CZm5QO&2<{RQFZ3;OaEzrPAop#f!tyTpq(qF0NscoN-B?qy7^ZPfW>5XBy=*Q*#w0~g(48P_9zBOv0Q z$*>^bK>VMH!%hn&=!GDIjqixuY{9QHb8}|`-D1uvuR*3=)7R4}D@CP_9s8$rEuyAq z%KZN49oNp{tOi8Q7yuZQOX3;r516R$l~$h%T6zA!iX$I`*`y_EU}PiVS;Spb#9sl& z1d6)`RWiJG2~Uf>Q*dZb7@kI_B46RH%E18sOI(ZId#yh=LVQWCA6ZyWk^1{CMgr8<49-lOCu24J|;<8tU&vl>^Y= z3ne_g{ZRU$To4b2d}i})9Xi#T>QSv47+tj6B3bHmVQlM-yfp}cX;2XiajF_^W+{lv zz^WIe(Gn-55<`hu!;i*^pTf?36y>8k`mylxPB-u0^Y3>d1}?{P`MAmW0?bCod20Y4 z=yMfKs`wR3A~$3iYtU4%4~?|u2cjVLdT!U(cHrB&g3YGr*8bhet2hheWq^0$vnboe zT3n7fV@3sEGp?3zT=sR@-^QV^g=!4o4*Z_>yYXA|pC>kuq|?_9N`5s(1ASFb*nL9m z`GUH8naBE@PR_MQV1p{JK;Nhp8DAjf%69GbR%an~5y#=>y>Xt?^4;bCQO+XNOZEaI z+4R*Kb{}&>jKeYHp`LZLTxB%R$^#QtsW!?`JH1N?;{ z;>pFut|Q%_6ZEawDW#H*?n6JzDfr3;jXmL+j0FTb8#|r#6fX9CTz&)53y^o8lJz^9*yn{`Q_qGwntf8=ePq@=Z zp-ECDWf}DsFYK86w-OiLj7*hNG52B{#dGs;9G8}7Z3k6vmOJ5Wci++*DLy6=o@T1t zS}z`~H6!?m>1Z|ZSQ@kog)kiK7bcXXE#Z^fKk!&uH%@J_!&aHkr>xcx519Ovs7Lt7 zquNoT9;~u^;9IChz0rw*0oRIo2pl-VOcYyEBJK+0&E{=fG-}By>PaP?EG3NUCk@53tU&k zwYQP|aJMbjWyW~EsbYlvlghm?02=~Jw1P~1gvg!ov?OF*uIrg^Ka4;^mzaN^VSgLG zF+&rTb^ms=77424HC`E)mu)_3`F3$^Vs!<+gXd3L!6KrHy0^){VN`X*kQC zeEN+`Tx@G*f{SS$xUv14GLXS7>KFfi3O+OWQ{;UzDSpDyj)+7T?&mz0$x#QEj)mCC zOJWxUw53#r7MV-a(hZ0*)?nV5Ql)`Dq|s;Oqqx|BGlKLzbSX7BJ<1kQ~tAz$fA(=-?9wmGjPrlVc=I03c`HK>84L_&J zUwOoMfEF^?X&O!%KfVexa@2aRrIly~_%-J>;~q1_D$JYF9Cu+D%8%L@o38dj6RV>3 ztrZQ6hoQqV(H63KOgDw`0+AkHG}oT-v(g{JO0vX_Y)pp`QflLhRjL6w{2hhuzz;fPe1Mub$&3hAaSAlP(|&t8q8_p1VcR3v4wF zYPGizc7{*NOa^e=j5!n>r+(H zvQC?L`aZ53)0`Z02(Qp9_co8wCe`uzbSb?sJw&5~J2dos?hn`r1xICHU~J>T9AiH) zD(M5`9b5>z)cW=dI}rIE+f+p_TO-DHr1ZsAc6x!-%AQtsy)dR?HwEvAJFoCrM-M_9 zl!pfE)3D+IJCq?L3U*meL#|@WldNQ!YhTxG-TtLfRUFI}*0rY-MSb;#Lsw-nki-ZQ z4t$;wS&Fun$*+Dn2Dd-Rq~`k#IgBQ)(F`DcdGQ8^GtK59u9*tY_Hbrsbe`fPmf}08 z>>_7pY+XAwCIbQl8NHDK4_lv7c%3YAkN0+ySs<-$AcszH6HOCeG5OM0i-1ag6(yPb z$gYJJbI+TG5DN&DU;cT{NN(&1rhSmF7>xM+hQiU$^paon^WLd5ivF8Cq7jZ6d_#GK zJT(|QL!sS!8~un_9wTqK%Z6z z727SN+qVi{%OKgTVB_xO_YXvgP|F-?e}$J_ZPCIt4`VpZ%75>{z!Jx)`Z0szlrg3AP3 zk+jV~2OG5@F#PIF{X0zH)z+eBu#wkgJh>Qun!+%-EjM8sIdot=yIzf7tnY=jt`VAV z9Hdkw1SeaKSeqBtttHVa&GSsZO&a}p)}2vCU1Lsmoe6ItBYbn?RarDhS`s{Dqh4fT z?Bq3w5w-$)94IQ>rcpdgJ({s|9aj``vea=N9+cdxkXBrLKb<7pg7M^G-2~27PC=o= z!~saGWM15%WA#n*tGMx`tm7QU&8nf$le2VDB=bX(_zD!Z{KQ*k?4hS}%_yTZxq<8U#knaAbHR-a?!Tn;aipH&neHg-@ z_v)Wx{j_3fbW>)^+hnacK}_r$k{NFSlCyBy=rh!k4kvc4iPA;gylOorMS`~Z0sVG#l@djGG+M3jAeLkcx5;OfwiZs`uo7-I5L~8rvrIu32c?-rfka~Uoe&_n)+T@|VRqD~`ck#8Bhm%nnJ6Ar*-8>d(;~N_K1sr{XUYCv z*$u*+3z_9~d@8W`$nkDU_?A$&Ce&o3|P;ab^xL02V zrFQ7z5<(Mb^pPAm4ibZ<2sh+`g)eaN*?>-XU8~nr{}!&S!+1&6wADC1T2(9@~F5gCefG!MJzJ*Hew?OB>wY44l=7 z0Pb1km7Gk!c_YKmbQ|Ut&$F8x*9GFAFIXQ3Bn3|d#T&)e^dHOV^uh$!g}5 zL0}sSs_bGyPj^vHWG`+|iF#VPn>ZRL;U|oe;0Pvtc|5@=6SA_|eqq;J5CanLfF$WO z5-gu1fvR5WJ5x0@1tqO|;)4Ie+=L~H2+t33J83-f4M-GDb%N+8yN<}Tx0>YmFaL>O za>w4qBkCsj0Qi4?P+yy)1rtZz;}^$}5!iZ@vc?)jrj|;$uh?A9$g%XyyX<|K9eR?h z6P?I!&(pVO7r`%HtR76oc;HIhMDGGI=ef(QWV5ArM_o%z5Z-BLxv(wAX$fRot<2Tp zQY@2aM6VWmZdS%#1&5i5>@6@2U5C9ip# z>NhK!As$j=u~jEV-wk(cS#8)si)U)hZvRF!_8GAf{RFo|2)3P^u8B@_uCQ=oI{m6_ zXRHi!J%vMJSZHp%!1dXF0)R5y!E;5xd9^n`(##U2$`{dkq{@~&cN~TF!I1E8x!C0o z&~xW@u1E0wr(ZT+4PLSaB67bHJ8?;6((yh7Klp2I^6mH`8H24Wqa;R*ovhWkwmru0 zS_M^HOd^%mvT)j|Vm5nj8^6~P@p{?jdd2@>Bl%!y+yCAKlDnH9w^4Xow;^YtsRWnq z&_9`7kBXxh=}uQqhr~gQI$*^R*yn<*NsLYjoXB=Ht)@=nq0ow$Z! 
z1}_6s1|Zh|S=G&GPxqn2WcRSIYcSY;nCvT@kk^-bk9F`lmW*3MatZX@RBNFgX(WcW}An#(JK##a+@FM zNC~?yd;#M=Bz6)?wiOm8`jf*pu2(Lj;{gT+W>XkWbSU}8Q{6`%2fHB18We{w z*bg5mjYWe|Ug^F#ntk+taJ#bZEeo3@GN6ou(q}d2M&jb62$p-w{+IDiwRaK+S~Ko2 zW`sA(sb3mLrOOZXV?rhq&pTv3McX3lu`;T7%Y3iMrUI@wQbNg&AI^>WO?@(+&GAvo z_xc51i46C(E-pwodaH>@2DJ=185Z+gw_BOte$K4lPbhAQcF)aSk^wlQf`UgL8#R;St_aTiKhdD{vyAK0EOL38~1&1vp|V-Vl4lyjwjM zG4T6uC6cf|4_gI+tRN6b8z(Tr^dudZeICWwKEBuy799=}L)x^5AAlVFS=&X7+%!6! zbllf*C4vW%GO~Hqs}Pvc^PIkfv<7ro${dT?Q6HyeOunw$Vs);^GaKbNdV%7hS-ET; z{VrF$I5*Hat;MysAx&m;M%nop^o==Sn~+7l=ZR#aTBygFi&g}~60Pg$P40ibs)cb= zy6i)&?jwjf5ESUeVGM@ivh{0iA+S$>-BdOZAbwBct$qr&kEZUJ{$YPMfD1ERCzjL6 zZ;;#9q)GXW{TGSN8hKu^ubAUL1O@mTZ#?{ppfqU5p?oB|Ga zwv7sZCdTM3J70@4Tbld^CZek@*|*4yeLl*tf5jCV?*pz%c4y-nN5Hf!sa1z#u4%qW zPqo?y5Cny+0elDGL&ei$h({*3`qNp-w%3hUAx!Yz@LzO>AauM<3LL7uG~<{m8simT zj29SlVD~%42{3Ks9`|RDG(A<}kz=Elbjw;4qY?>EE4OY{Vq`4jI9T-3P-!69gVTb5-RF1pTbC5gAzkdRgjA#h6_WkkfCEIy)s@+*PDGr{c*6HC- zhgY|QMgTJ1_a&W78i9rpvNv6v$XpRGfYafX<>ULR9UxShCai9NCYdpDiI&hEFwC1X zw_D`&d||o%!ck@oSlu5E1TpMnmzOrnE_5{R!+jExpXi!NKTg#dp`zGQuWdifaCL*L zcdj%ZR6*OC+dEifgC_8}5f>}|Re4~Mi^;u7UbFS>%J`Npq`4G6t)B>bcM|8~hPNL^ zD@J)5#E5(*57LH(34P~#U4A;`hfj}GL)*&gu6uq9@(1Q5FAK4nR1=Hc6%8O0q43YM z8~b!Yf{(Gdz>{b!W=ta#3#6wJq8H0z(TqX?7%Qu+J@8}pb zLnKg7A^@dsTu>9mb8DdB%M~sBCV4|J72yvXsbV*8%2quG%K z%0oDVeSwA$S0c*A(tz_}tDtxLmoq?g|IW!raGGD5=Nc4RC8d+k*j?fM0{~q#pJRbW zwvyuTG6_l4+b`@$(Lxat+a#IWtK;b6q=i7+#NP3F#SqsYE|7X@Q;9hK{(1U7@tm>c zr&J|+l8}u@ep}ckEdieAvS{|sS$qh!aESr2w7PNXtc<^e*S40%Hl?K00DXB;ghmF}9cg z+qzi?Af9<8>3Un7l8>0TS8V+C)_Zr+og`}RELoeB*#Oi+>Sk^mkgw!v+XvpP<5BX? zB1AO%M_h>RcP;>}eg7==UZ&Io-&jqN{R-l(W>?P*2@?S^grMAt(QwTeRx(q>_-uWm zG;~wEcUrLmhZO2l(w*xkk;}IhE!_kxdI_Mui;4XW7x~((d%6vxF*Nj+8us}cuOXjP z@W9|QvPxYZTjl|6Aus(CvT6e4XxKPA4g3~vC)UMhuyO^==hjX z8Z(C!Y|B3?HE%P40E|4DB|Mr@!KeS^*AO&9xY*V3_ibTO3PwPSPb zeYBkh+QSRxQ-45oHe=FiI~!e^X7?{dq=;N4@s4kh8Yl6N&JEd)jI&O87rAJsA`7ow z*#1Q)T1l&`A|Jbgs_Dkyk7|D(aKdM2pUS2gcf1DYqE+C`e}5lLbN3?)%7!`~+9T;I zFc;!64e+D?;Q99>i#G_DBaYM$3tB^*8X7R>v247unUu6&ev47}<&yn2_Pnhak%G(D zj3EH4_A+O`)*%E4f;q+%O;jQoL#-fhLbEXb4EQllXrk*%moU6A z0<`@uAvV@0Wn)$m^3}ErGPYk;240ZPtCW8*fWU zvw~EFVeT@_*R?xN7@jHU9BC;_Rx}+nOfN=IUEgtnw`C1xi*Qe4I#pVgggc($8FLtn zFA!cb6k!I?Qq8;%9VD~j+HDw9WAH8ns-nd_Xri{N=WK`Hs!;)5NB&EoHVn1Txk+R3 z_0sRaIR=IJ-jO(5lvg~;@j@sTMzxN@9>C%~U5}<9K(@!9!bp(O1M+3bhNGAuh_*$# z@kAiYE74}Nz^WrC!4GWx;eWB^g#i^QOkD0jmPel&Cx@Hd#TD}eK2orrdpXbitmC^V zhHOw)EEoD`GMa}I+xFvS+gZgOhP@ltc8^SX@dDM1;pr4dGIPhV1ijnoh32;0K3g&% z6hU`9q}!UwuYQTnX7Y9;1|xfVk`)H$sN_%J{nw{ikm3+1>GB7YH#e7xBQ+SE0NTrN zYZ&<1Sp3!69SE)Kl8G^J_!VrBeJRHh*V`a$=CP@x;h$gHka$ltiw~MS%m}DU{Qp`TjuPR3hWmQHJdUn0ax3^?S`mU+?z6yaI@` z0=hYukhtJAAMWq(vM1K{Tm8k!C*Dp^39UdkMMvN0&(KS7B|bQY{s%&A$QpK*KZznt zgDc6$?z4v}ZUQXBifl`u5}%K`Q#>=fpMr~bDD+QHAg?bXSyQ~M%RZXl93D!pxL3FQ zjSt?;VArHKWvmfl$#Q0!Y8BgE#gC-7Mm$fA#7M6 z2QaC+SI`UgjDQwlc{oU(_vZzAy-L}~EMUO>=7PMMSjDuKfZ*sKcKJd7#$f7#hM})d z34FX6|4%Y4`y$Hqv;DIm+*amY{4a|7ZD)bRE)A`0Q^gM1#JJMLp3kC$1`yiNf8Dh| z>s+B-UyI(!EBPs62?y-pKM%k7T$g47p|B4w?cB>m zGrnMXaKb+(f(beC7CzyB>!@o~*f0d_9*z{m{%FulCiOrGHXIRo)W*x+;>B5wo$wk) zuipNn{*_2Zos)Lz;Az$Q#SDaBek&{QfZ3j&4;{YyxZ!#hg{ADk`1CIZ*VzCX( ztN7-w`9fwp8vbFMIO*h^@|RU~a4@f82#l8vOK(*P&x(X*<98>~l1|Rtj7s=J%W% zPsA>sgMJ>|ndjLI+g&%(W9Ae|IF%b1YofGouYE!^RjWk!0K>+dQG&m*xqKbdtc`>}7!>$1=bboc??D&Ihh04MdQFnBeYZ>_u|S%b zXGIDW=zZGk&ruVB;=*?qBA}><`bBpBL)cicdBFBF@ETBJ zjp>Y)e58n7F?Ph8=@#&+%M4y4t_tz}jrk)R3576}i}@fxWg-Q5`jC91uS2E;cCe9u z4f`sFmpiV7B5zp_afyg_a8x|3GeBr}>^PR(um`bQI*qAUjKttT=1=dzFn`q*hFz4x zNQ5}BF84qOLS9>PR|7JCAI|t^s0Iwz#rQ(NC2*Xj4P&{3@KJHq9FBdk-m4VeM^n<0 
zai~iei5=I~4r59%u$jUs-Nv2ZkrBz6n&czufiB|uOolPa;%PsaphmnMg}tOG+B;Q2 zopX4>H1?mBmqN+aCYh{g1+$4S?9-i<9ykT8ylj&8lLphbp)noN8qup_UuYr4O$4NB zvX8Zi&^Ox;N+CW|{Wo!?_k_GzCd6-gEcbFLC6mHeX6)_i_gVn4?tdW>f5$H^j0a98amxFv`Y`S@HqFW}1ojVwJ^R$$Oo7UaHju1lwkYy;1 z!`5LqN7{Fc@>e@LX_YEAa1{;C~L1NaoVh2HCVX>Bxwr-yW|M^qVn7dA#W|`WIwA zDGnBnqd#3}ifzm-f^?4Zno=^3ByS_4XY;pPpSTwRUP`>r=l71JQiAo%W>P&Yd2YY$l3QCvz()OGhvk)AsH z2cv!qb|F7%;0;ZCKQ8rW*t(+pDa}v@q?s9EXvTl3CFn;5-SF(HjREvvlt2#jiz}Kz z*(bjWX;FwDr{W8oY{4$BUbo?0-R8K@wvz=*5wjT4X{4X)Y|PNc{&n^_UZ?s)LcXT| z0WzT`QK5pcj)-%>%sztfu?T}xfBVF9tqYpW^_|wXeXj=LH-5ZTf3NlYqm3l`zyz-v z2B7N%Am2iOJ?Yf!-umC0)o`?v9chx~nY}HFM_4fDud5n5yUvU!9cKX_IP}*yTIszy zA9`BS>>B08uv~nKmRKX1NzLfB)809`>&gXkG4cq#;zHH)L?%iP4$nitF>woAvhxgY zbCYz`&$zW4p>a6ZK_dz4@db^aMG>kyKEG}N(?xaJL(?PC_jFrNRn4_H4 zB~f|RPpe^`VJUO;yLoOWD^`bOg{!rJCt4(Ei>4N3edkM4M^qHpx{QO{sqTkxBl#Yh zwIQfKY$|eFb`@6wl=2lmJY3pa1Bn()gyADK0nw3I!Gqr_A58MAWCV&>rfC$nFH?eo zcISa{9K6y`N&@)G;6i`u7i(07a@sQ zZ2~T4RF5{~*6TLa{Cw$awi-9H*-7}$gs`Ay1bxePX*wjbzA()2a87DEG@0X2wR(fW898QL-7d$(}fORBTR@h{4UktiObt6Vv1WuEtSar-$y9R~P8_Aa4}{krZK zm6G93yFc?jHoK;ag%=1QC$(2eFcx3pCPITo?aTCQ9w9R`X$G)U`OFJ^RS}Vdx9lg* z!ayApa@B5g8B7zlBT{43x=7!MiBdD*x{VC?cKSLWA}_NAr_n(sqWe7`#KkDTAyEEP zBVJoQ==gaE+8ATqIX)|^3b&52L+tu09>ch?lG#Q@8tP~Ud)VeU5$>lFffiK??y^HS z*G=J9sXd8x-mL9NmC#s3k2AE2zc2Q$%*!`Sl?HWmXN^In79^h0?=|a0&k<+f>Q+6_ zQFLBFG|y9>@xiwJJ+B23s3o(ShcIKN)w3i(S)M@a6H_^j$(+CNt>D_hLs#V#BplwE zwB>&j+xa4Zq%3o7S=8`$C9Gvf#Jr_s$BE}X9-c-{1|>qK!;}cao;+eAIk&aOrBlUD zjry6cT@)^KN@OEY8Ihxc)6cg=pBGT zo==6Xy`6SvDGA+Y$fB4tvY$!e{Ow!A2D@vucc;-gn{|73xR9g}FnpkWy>ExOe}E4k zZ#VOvt}3At1c9G2r~Tl8XFmG`Vkr3E`FNW#Se25S9LcaVlSnr=ftp?IKrwcP`qDC9y;6aalj>;`Q#eqMR8^}*fZ%|FSm(#uL5S7Hv##YLsr~YnW?O~= z@`EievI|Y}K(<r@Ojk9N1{_um z6Kt%FiN9eoR?elZ16Zgz$87&z$GqLj_y&qJj!kxMXb6b3zt48?z!(y$i9l`8$*OP- zsTc8CxM+bl>tZ3$5Y}FGfcK*Q`GvxsC$UzSqr1pA->Snj2ltk&XvPLvLC|o2V7-K< z;E%4i(s4KW7msOUjaCS?J}3lhGk}#WME3k9ovPFK?O3S>U26c(lE(C=r(u<)4daeD zqA$AtaS*Uy#JXo(2&tNB62sR%kz5^L@O5B~4i6V_ZtNt8%*5qEtwT+NZfu$N2f>GQ zUVes*bPd_B(2hk($9EO*BrE;1o3wfB^hiEGd-GSvkV8vm29B8L_dlYrulnnn8_cj= zj2m`N|WxBa6-IM6Acqr3i;bp}^cAC5FnZ9dAY=zin<=B<1j zsKfA)X)|>Ni*`i6{qtKVhp6#?4-D(#{b>f|in~BRo7y+u1TKb9?}B3fkFLfLEq zktSI>(Ri8f22oE(F%tYTa$};?zi0^>n6p5i1qOENVQX3PoBS8x8--;?RnJmP(QSq079+y>PlAK zedx0WvhYj~V}^kQI@k)sF+70ZC?{Neo3q!kx z!!P|)-5{Jk8Ov#%cuj*n5rKxnCkt{09>rpOm>ni}AC@dVXwhw*xe;zZ2iOpuyfhDo ze+~iDrv~uoqe;mh<4*y8!5ut<=oC&p0OiV1Qr^+ajbtAts0f9hscKtE4NH_Fb<^+_ zz`oa8bfE&zSN!8wAVf$$m~@#i2fV53$*1U$_wWak;S@0XPPqbgq53ISF&ph?`TB2< zZGy%&$Keqm$&1lgTTG3CNv3A7pvqi_9M2FflU^Ac5NXklYLC*o-R!3tMBBo>6M{?O z`0W$tg#r+US6(;vqOOodgw0P_{-)caH#Pp*ivEsnxKr(XPZ8bx6+TOZyYZ%*Dq1t-gxVnw( zUR=rcy?-(cg)f7G9J0)R{kMdqdL2fM2{YA}r}2GCF3Oz#=1v)!0n3Lzt0mTeG=$hY zFC79XAQH@F{3xnkO*dqR4zl;lSwI5oYy+xiG^Mju7cXXtB%k9=p!Tm$P_430i8qar*RKJ>`CA8+J zQrJfbfa9EY1)AR}!G~pCtCA&rQS?Tgj_)8AiHWj@od`urz0~26IB{7rZ|$(tLouMtE3~Ds{2kOZND&iwdDG9v;Nxp(-u_FK{fH>rYppHzF zk(#8FUCf{TjiN~u+(*H)w_Ee;>cz9Du;Sg*HPbvs{|v&!Evt?rhy#MM#+J#V`XbZ= zj}r89M)-M`Y5YNJLHhyNfP+e^ypN*nTyPy!OG-*jwDP}#aB)W#3dLSykF_vSkG^Eg zmcv`V$;DE4<`<>4M;JGDXhz%j>E$eEA=7wrE^RXD4xC%TvPxbi=ZQy%FrE@B?DQ^v zV6qdxt-oP2ghzQ`e({PEEwgT>Atsgv@KYZv!wYwqWJ$TFG0_#|BW)yC14$+8L)M3x zT0=cxkxk8Z!HX_8dFv@^s_xE!=d=AxD}}>Ire4`>et8QehRo)jQE+JGke8N3C9pP9 zMobslpMGCAmo`kHi{fv&KZ%{dhD?%R+MOo|ZG>dPxpuF*{UE|4-qmfYZnuT@N_a>i zc;*{Cwrw|5fS#XM79Z3tA3uZhz$+g{n&yWqAqdH`$*D>i*CWVtBWScLkw3xvk%V_mKys4m~(=1$tPOyZxZ*{wE}g8uMSe!DCs` z?&mTFT!Q1`PTS3A9=JqT4BE^}S*)Dur3H-2-6K^P8;sC{iDlAx_`V9(dABC?P)Pq| z;El%p5s;)`&F8XcT1vZYT}wVl0x^F?G~oyI(Hd7(xuwWE1aQJA?x-8nV`^0ob=m>I 
z`^AVo_fCwXVbE4sc}oC+SlwoL+N#l#4Bw3AZNYjt9qYwFeKIUs@pV9T&+gCG&%n*akZ#DX0zHkExSr|>Lm`|++9`xYkG!oX_ zo2p>=Kg*s;1LgPJf&ZzOW^aZXm~WZNzxQwL9>BF#60FErfj+h&!GnUiv-(|Q$3;vx zErC&X**r$p)7_VDgkt+q(aS#c$H^IkCxpEiJ|-G3@R~8cFL$ld#p3N797t;ek?PKd z=L#WDrn0?*q6v^Rlz;8zKDnz7BfLzLE~YM7 znITg5HkRnQ1(caCc6j}_o8fPOF7K#+=C}Wb&cYYe20#8SDHQFq)NxUALKc+dd~TjVf8x^ zzJNEBB}x8>OspF5%jdej6QydANX9fe?}R(pSr#SC zow4=G9QA%IDnUvLBhABg-99}LX{`AK@;ifiHZ=qCYt5l<|y>!!69PTSp2_!y2#~YU-Zj<_wp0O3v~jz;ONJYSb@i~ z^UYZIc^UKqXCOQZ=5t;evcIlMNVWR&jIdKr`(MeKzmFZU!U%5HdU#1P42x7qih8^x zIZdu|%Zb{k93x~%5+AzejwLZY# z{44`ghQRH)uq&ID0sSMht|?)$wJg@-*R#I#c6YNo^yU18!9waO#*m(8KdVu4X#VY2_rHrx8-1DSXP8gJXFYGrxUlohGQ7&;*>0BsZ_Ty% zGL+k}k|N#}Spv%Ze%`YhL}rN}>ail&&+M7#qnfmt


    Ts)6GcXs_;sd*XX-kULNG zMUP0)=LxC=`ENw>4=pd^GvehJC4b(YcFpgqI->iA9z~WFseqBTB*5YBMCbmAM&S1 zn8YAn=@asCA*;t0y1e7i;(o9>jb3sSruw*E4D7ToS}FV-#j<>+AvXcTy-*ys{(sro zEFkyqIUs@Pafw&5^JrmVpMQn3HlCD2bT$q+bVX1knViI5c#8ITXK$M>p23r*z#1}| z;DP>?fN;ahsIRyl3!n(Up1~lAZ#Jzzr(T0z%DK|6Dx$`?emcfLKmVMB$+Z!TSuld_VG?{$TPx#y*hW%ceaO7qCUBGPE*JTOe@XVssg@e$?^UM`Eg*_YdN#w(<&jf{&Z#&5P(V`}*ynr@zTZn+juJb>mfN zmj{LhX5a+7Nf>{A&zLYOoj-l=Lu`gx9`bcQrn2NLSt$j30fIp#tI$;&^23*q+1Wgf zsZ*;_l6<5jI0Zq=_V``NJAYe%Ck&8*MqKdWhG*=D_q{2>eYo|LP8NpNsv!_Dy^`Uu z)2|`t&vux^RlfM-HUBdEcqa2NQ>sea!IVh9Za|&bnDU}?@|nBLjxzs=*Z1(m^N&#x zNkzYG{ci?KPy__)?sHuJT5%7Cu+sFkCp9AZqgXiJw(3+$nK5?%x~u@5(oFW%(bz0b$l_BXa)JoLTpj=v6(Hw@90YZ(Xs`KMakX*G{6cQdpq6}B+~ zJB^B5ohoyebw18``wk9?p@7HR@&fu&{+IV5bqU*95z;*B(2ppb7-Jwc6eENxFT3!O2*v^4LORNdAuXZt;DIJIlgmAPY!Sqz-?g)(rfv{D zY1}^&QeyWr%^@00{m$kT3Cva=2d#n4Y7wt`sa_9g&ibu=!Ym7#1cT-BpS<;;*cmUv z@|9T_8_=bTIY`wCQx&5G<-qvbs)5KKax>0puYOjvQ z1SLa;q^gOQCjzqCq{^sWbuf*sohxx_oIlP{V7M7HC*-r_J zalr7vfb)OM9Fm9&{DgY!;VWTuWv02#N?;HhYeO@37R+N5cLg)xlGAz1?%(Lpy?TP@ zlAxL_c5xxt!Y+o}062xAJs!Z_ME6uyWT)2@xfjJoouGI1gaX-Qx0vD`L8z)Za7Vok*C7bU6&_<&Q|XkV%Do5+#0Af%7nwSyOY)cFsx) zDHF2&**QQg^Us7GB#~R-Y^iNE&muvTh#=^Z9LmvPsc{4KU&{Bthq(A;=us~2%M!`M z6U}rSA4_U*eDv#T!E=ZM;wpz{Q*f)vu4gZdF4Dh5qbe#9k%b#FH1!!f3@*;>2yTRo z0R!EOdW&GWI3|Iicz`4gEkXDk(dp1o(T+P`sa} zc>u5b&od^kLNf4n`xz@!GHVg-wOhG>MobIH5Nl4eD07V6Kv3Dw{2gvH;@d-7#Io}- zVt**D)x!bbU43YH=7)QTaYN{`XtxnfSl?*ak!aDl30F#e?_K+u&WMPMbXuT%N}i)V z+PU%5fgvC~{$<1bs(3@j-C+zZ@(oE#2`t}L1OJBtH=Kw2T;W@uFL!PK8X>k;a%Fhp zTKu(GVPxUgSj`y}oJZwkBCV;c49mfUvM-8}M2kMd73qOlcaJdK!-R)R?VUl0NS@}! zyE;~y(3p@l{#0cA0nmbJw6J?e>z4K-tD}DZr+yLm;eq>nlosqoi-_0PvpIP1k@azv z58x6Wvq&?^E9LL8FW|Z`k1~ul-ijtA*uD=kwfkpg)65V$XkE$2K(^6a=Qil4mBW*4{b+86gLaY(H)m)Z)mkD9>*NTt| zLsVLaVP!3_hHV=0|gwGV-@7qD`zm7!#&5Z|AQXxJJ z;KyS5X!`{uB4V}53&6rU_=yd;1qxjLb@$uY!p|T`3=A6bD#s+R>quq)NRjPrJyt4W z8Tnr&J9K`M@NAOm!hFyChF8z%v>zsK#YvZ~br_8f)N4n@K-pijYY>-b9tEPcEoZd{ zpQNgbDUtLGE|XIrJ6b)LASHmL4V;&%ef=B21 zLfAu$6ltm2t&)#LtZj_ zlS(qf=#vdTFhzk4L&B z>F5sUWeb9Ns7W!*28KNXo_qdrsDm<&6fsi0gUJ_oTN>R_49EymDu-=QX8JGmKQx4_ z-o18^1SegsZi|s13``fg_PAN6%?`m7D7t@U<<J+Z?{>=cGbN|sLc)kLBvHV#|&6hWrN2IToXmI5m@XhTt-Kj&! 
z&Gr{%UG9fi{Qh~DgtE{Q>gs}5(*}k+ksJz^W95U+{0rf3?0iFW;fc7k*kSwuTbnf& z?C3LIJqnz{oX>2MZiuU-lHr5NU*`wxED;%A0?-v2d^dO@PUI2!A>J)B1MGD0V9iI- zZkG^Js*)<#WPHJuNX;aZm+9B?zyB;=sZ0CMJQ#t<0Xs(=_Wl0wNDwLvm#}fLJ~cZo zc@W4=H}g`5p#sfasG$I|hlo*Fc>)gTj)j**cT3)YQ1q#^4vf2?)b;{5?9+;4Quz<{ zvLeRsE!XSrH3=B~LyL3xQl$Flw^m#i!AO#2%#WYgm_K_yM@_d2`ZPb*^|T+lK*A%= z!_?)5P{E%YId8j|E}Fmv-#7oQ;CV;Vhh_lA7V`&}+rMXw1d!;NL8a{6q7dOtLCL41 zhE^}pS`LGg^^Z=$@oU(CygNN5BZ7nZiZYIG)(@c`1WNPDC7tWMQryTX37*gbaBO)l zp`a(e&?2@UB=RV?Ab{*j^l&>YF(^tyKJHSdEDha|W)Qi$_WuMLu9<@7f26jR95*^bp^z2;!h<36zH-U1oGm?B0$1(@eo)5)5H!FPIYyDXm?{iirz#Uo%`481-S|E zotHIsg}(qEo?gZA%1F@SL;dpSQYc>X>+Vl9P=Rnp+d}hbqU3CGAS7`IS*0+Nu`h%{ zR=Jhp3?Qfk>V%$-Iz3w8^H$d-G@Hm8VA|hDl;p}gC;Y>Z#RNTj2UwOJV&Of#!3Xqs z_Qy{zOc!#p?M{- zH<4TwT`B8b!~_2SNP6?Qrq8r{_znpK5EB<>Of(5t7z6E$vOR4A1VVjH05xoOIzP*U zDEtcawMqzUC=iHChlydeE)5|LAUe+|bxJE#QD`fHwk8NXbpeGMG$0}Z0R_={Pv-rz z%Ll?Y-}}0+>zs3))3SR9SPf5T5I)wxM6Do+9xbbXtt7D&&usAAMkqDyeipyhT!*5u zBp8Z)>vJ|-pI-Y$Qt<$p7hLbWV}enSL%8_@{ZZ&XRIr(wn**H9c80mLr*nA%bW7eZ zOj*o@O~WAW-IVr}1KG=_Y5WauUU=1i;)HoXV5E8+qt0TEc`wLqwLYB~Ij@zm%2{~x zuFz>ad;^Ww@Xk~_Wl=0|sYgOnp_$o#wDtIz|EjgrPD2&mGZmIP>u@01B#Y>Ev z%$R6k)v*rq9LsE@8Qu)kuF>M`wIk1iNRBDQHJ+{jW_@5(GbY#FQnBpShT#&FoPxrg zgs_=g|C=emhO<a!H#ZrN1pJ+6LRrq2~s-RWU8l!3D4PyYk#+#eg^FTx-)i$***& z5PmOz)ZWBZ)bAC$=<6?V$BBT>ffBc|EXzLD7d-KmcZVGSNU@>2FDpPa5hJ*-axsDl z6S{j`1)8vm`R8sQzT)>O2y#Ok3|oZBTUR(^sJnKFdM&l;N*>2{lkk2l7Bq?SsK!{l za-MESUi(LkI*FFt;4eterMSC|I=B$LC#+F*&t%p1ZcAA)m}fUP1{7{RJze+Fm^%o} z!g=>E^C;CgD8epJO8wRdYe=N!BAWI21+&4hd~*Qc1qrrLzLTGJdcbhK_}o!cs=Zn5oV0KF1YorLb9rZjz>^BjZS#XbeaKAIz~!VHAvWDaY(X6iJ0Gr6t`~5|#$sXnufF}wW+%l!XbEO<%jV;cCT?$^o#I^iRT2QCVYiQE0;Yp9g=Njvb6Wzr+@BaF^I7UM75p z%cJfqpDE1s!L~;F*}(clyZ{pb{~h=1Ww7%9mlX62-!oROg=iaPw|@aXY+@I8nhdB9Pm^os9G zZ$EmdVxBN8j~l%(z7c)H`4>ojq_cYX!}~O^Qt-nDRK>N;=Ybi&$#zSt0ZN*sxRcwU ze!^`3PqKko>InKLx;`GV2B20VzEbH-n5tL{J(3OlLi8Ic6-bmV<(jDC_7vG^T2owabm zED~F|kUblNuDTM*XtHa_j0Q+$z@BzOxC;vn2BQFfNX0=}s{>>E8YQ}5&f38R1ChoVp`Hxvgj27HL#YH$=UjYpE%EZLkvS%FvTSH1&>0+nzvC0(YlD zW=&=O4a(`<`5H2 z{`+bgOK9bP^IykHv2g3Ie>pJ~RbOcldce++5fEWLn?d1*;I-E$R&_pg{QteKJjdii z7dA`$uue>#MOv>Fv=D?=X%Ng{LE5O7i5RcakDk)H{_hFfn0RQwDxy7~Z9E_Q2!^6@i6o7o( zbgIU#@(8PyR~O)X&OI!x(K$#5?A`&5ctM<$@$+1iP`wlOQZ-7nwP{pZyk?C6)44L) z8e~wR9S?>-Aomj57;lds?$vnd2LTmbGJdn!zDYc+iI?l6=r{(S#LIy6Zc-r3(VpXx zXIYHJu0Luc1#ZjhWe@BP?28`O4c^|qHP!fyw6dN#i8)NVb3=ShXyc*#e5IK z!g}pqL@8ST;#xqTi|a8d)Qqp>$LX@s!m-11AdJ%iV9Bj+D&uwp-iZ|=Vv5X0QLlfp zW3C%)!t(e;xBLbS^~yK!tBVd3N|uu{w!r9xU4J_D_ME{@tsOoMCzNp@Ggxyzp4UN z&fYlfnaqg@HeXkN09XdS{PLmlM1HiO5d- zvmch~jR0v~ogxH4uGPlHo8ST!ICKeB)gRRtr_Xmgtwdt%rH3^fU91BWNsQpce1%@# zDU0!AmqycYeDo0(cY_WtX_Q`}cgorHL>mii{3JTK*IjQ443yHucZl=UgG$`FH3LCy z2V;d`mWhQkn{i#cllan7*H(Pu_m|46knwoz2F&AVin-|P6lI4#$HPP8OhRrOnx{}V zW|9pC$1o?{pUO<^_t;uaa#`8q!3mNG8`QT&FS$Z@L{mA8s=Fql12t}V)vY2&s@KM^ z{6T}kRmpaPXF#VlKNZnC2IQFvLdj4^t;L&b^|TT&u+*YtkL`V7I5^Z5c zIYdlWd=hqG%w|DL$Je$`_82M=lF^~U>UgsZRb5xbOYi)`Zb_u{U_sDX5i$_w=|nk z**GC;6&AeHJrBDE<5eH1&XP&rS4e&}N&N78lBcF`U;ad9p;ICHiZmWnCd#J}JLZ~K z^81pv59D*fG9-MjKP3`A)~vO=DATM%yjG5W5tF}khcedbBiujkWs*^ad2giRiolAp zFg6<#l)B``z};yr3%+IM)S<9&N+r(wiy;Sm8h~ZXW+4C>s<%cTSRR}y0ee07@bHsu zguK$XQY#)l!SReVfAW6O4YD~3i=Pf(ufFUJ;F>6EdN);Cfyjagvljy=W!eJ-zcL{z zHqz-_kdL1Aabn6mr#v8LKbYvKY01ECT`a}yo+YJwT>#&ZZ`HTzX0+gI^TFaW@>1;? 
zH?rb|g4Y>L+8p|>Ww}Hmg>(=3F0YWOLeKEgE&hNy4cCXRoI2Wq}c4$k~Ekl5QX$nmfYegcp`p* zdLLmY9dN3XyxTW&@e(K>3A5xvkQL~uDdv~LF$4=i4c*R>cZS!&sgF<(LZ^eD)u`@t zcL$i&qr!^1!U42#eahRog0OZ?xj^=B(*$t1*c7Gcv^nNtj) znbpT>C2uD_2jU3nTb2xzqfs|kiBH|b*H}D`z4bb`_K*y-!M6rU-b27Rl|L zzWgpoY}_=Tbug1MijSEJ473Df?bzSSulBwK#&KC|X`7Obn6ipWx#JXymo~zkp<_tF z`&1V1HAlx^k?R=(wh<6U8HM%ckE=?$HBKbi=SdV)7sbg;z$gw;!&xFZ+?Be_vxc;n zF=19rRBLsR=tE?xD8pq=J?o5jxaBjfZj-F~Q!zIeAL=82`hr8n!p3@+6?1FgCl-9} z1HMR(w)B88<%;{e?a9_%aOu|7@gW;pj`1eqcKb*V>4|Asn^Gp--4 z^o^o%bn+2MaK%!t4Clt>Uvj zhczL^wT)u{8Y#MzNr?+u1Y^u2aVJ73sgI^6#LFeH)0%|pqX4fXDd0vm~kW* zhYhCb+quwxsPgz^MiITjmseyZXqdkUcGc zAPbJ`f;i47t7YPU)kie-bgv|&6vG;|r(1$U@3K_v-Uj{2)X?b&v?k~cpZEq&0fALi zB<3~<;m+%I@Y3T-9t13jw~PmInMC_Q#F?|UI~6cU_IWOv<3_CkZcE}e3>dxPhz*a%jN#slu- zA2n~%D!@r~hPukWSGnuH@tg!(lKAR%`gDyN;utGWeSx0>g!PSFY}~pO>!6&Ab}Cd! zFMyeEa+7Wa5;7@q_7WL!c1MD)E+JfqaTS@WRA@9Dem=r-KWd<&qKcv#TK-M`fl?D3 z82;svgC9OIW?=v1K6@Op>>OpD>o%ds`!`#b0dxl5g@W9tEW2q$#CMfaci?${@T!Mj#DM$69^s&t@%eaobFSkGsrokXd-?3ExQ|yS7!q`3x%+DtkJstT&AG;%$}^Gj^nVXmhr?bX$LP7QN)~Bm z8|VyKNd9y(5)dG#zuoc; zi$$YqmvJ%pE@th*BZ!*SkIF&vc#b}-<*dq8W2SZvJH;LEv{p@`=^pUHPEsmW82Mc)!A}|KV7u?{Y>+^^ZUUsXFTgzF3a2*@x}roY3oHL^#uvExmdu&>YKzRC<#M`q&93|>lW z!W6A?5(23JYa@bm8s>h6JdQmN*tm7^am9Hn-X>?lTP|mayD@k;`M{&0&w&|-WSl;S z*Q}DaovoVWSA7og*hF#;k%Jrq(W-&>v2fH*acB+wtBt_LhO>wL!MnbI@u7ZqraPT^@U#Z^F}BaaTwU0M&%GWQfz!(x$d%zphKsZ%D)x9ceQzX$c< zf!he(z-r*xNX|rb{y1X!=rj!|Jv}3x;+M&ggta!X$6yl0MCLgab3?MlJI9cU4~Y28 z@MD`r3h=aPqVVHXC@L7oCZ9qM(1SBJ1y?crEWRq=P_Y%;p+vy{szEFAp)jJJ75T;p zEUYI|luW(tDnqGQ1Oy1fTZUbpZ9Y(|21TY;<=@zx&rPR@p39|Pc5Ry&?YG?{2i*dL zj^giMrH^b_M0cWjabrtzmI6YoZ0>nn+#HYHenkjuqO%un;!O~SX89rxA<;HzJl~+% z2M2aaf%~Z!5PGe9_G0p$S}X4auUzJ9H)v0KV>8SO=vs6%LIVdg4*l|urC9nIJkS9u zccL%_`}l4zZm@APtd0hh_@}un!mb}Cz6++6 zK{@jV`r4}Dw2SFxF}ieWk^lA)1Ah97K_W-o0i3qUwZ>xitA|0|Ix8*BO>EFw5vOBw z^}C>n7c8)s&$st*?2^ap?#b0z1=xk$v9FT(J+6n*{x7^|?+OgGZlTcljju6r?6nW+ z@|^d+rO5@YL#bxSjk}pxc5!FQMw&Ji0JC<$4!(rpVnl6c1YWKh*rdImj|5rHTvVxU zP0_Vh()QGcUJurgBvTe=q;Xd=fhW;f6Q^UN1R^@#KcSfo@p%qt@Lmhz8f%&#z#wIX z$%%5N1D0u&7_sE_wZ`~0+x=htg2}wMJ3u~h`hEEmDFg#Xv1m@hobl}u9)xe<59Y83 zwQgB*nBkE<#tYmJH%LO182zj1h6XMH?b)bp5Bd6KTzyKF(LuE?JWQ!XW1_6@FdOWg zKp+mEBHrS{r?db+nV?lohPoz*cYv`?B z4)7fdv||RU04Q?1pvx1IGXw~a+Iql1!gMLZcpie|Q;)S`PtcFqhBtWIG8sq&@;4?T zZuH;en8$Il84I~Ql;RhgYLFsKU=>QqH_LHd8-M_Uc|h1W=74FAfz0%_P(2=~ z;;?_oIlZdFDE|p~YcTv9~>|O&fCUx_H z+6F8Wuhx{u-vthkC)(yY58l(r9!;w{9ifHXny1D-^$3~@zNS2QR$ieb@5wa|KIRq~ zNa4VzEBRi)NBVAoMQ9IFLb$k9I|#*DY^12o@Wx(1{>(N%g2UPed%?&si&cfO8)(P^ z!!DaOylJJzhMfTl%<}!U&Oj9i&o|0~^DytJ@2*W5fo_lIZ|?BnkBb363n!ig#}@hH z>gTmDC6pc6QT7ih>sdNQy&=l;;}cXWra{LsP9*}NH4Jem4)D2NOL|b9z@i3bKJwVg zK6AO`W00X8|QS0?X^D@+mY%oua?O$O4=!nhlKhFP(%RK{#b{doN0* zb906{$RXS#K}=7qOc%1Kr-{F9;+}dCH2O;~s8)~D)D;K@-WO@=Yqz6k2W5Uj+!kYL ztQ)?zjj|%#m!~!qC$+Aw`w>r-DG=V^W;&9UU7a_4VMPV#=9KL%tkp*#K4`eKr^~<- zhUVI_W3VVx4RxqqNs`(YRnz?^{_uZtM~~YQu|cMuoWJKES(J%Xy;dSucW3;#S1Ywk z_SBT%^WKGH@tkY{n8bBpopcV{(?TK23!H`ak`q6BNAz8B5INa{9pwiV-(@+&t6gJX zW9NU?;t6VKc+Hje1Ivn?-M6}-!$ovd@`8c026LlpO{L?;DKMJ%wN&h1*PgNviXJQ$ z|5bDSfIG+6%oRfK>*3ea)tw=rVn1>{i96Peb}0I%lD|=aMY1YgHOWY#?6L~jXg3dU zi0TowolSe(O>a(&bvQ!rTMHL|br*+$<%1~4%hacW#^MmzqPUXyHa=%y4@ceyozn_c zVJChT69?${r-&Dqdrj6atHh2MpEKbm+{95Dr9u2}tr+~nLao^zj+ALV<(rNHAbg}E2 zxx((1EY1?b+uUaQigy*xFC#{HR+lpwg@s)ep>X=Ok%WptOH5V6;;_B<;R)?gf_wj3 z$Ln!xXvHVQ$$z5U#lA5{3Le(Fy?6*+$lMJoR%>5^!9}y7i-R2^M?KJ@l)a{7RjK*g zQF_1nc4*Bse(Ln3B%fgTGBF=T>G5_Kmh7hAMi=bLQi;8(ck&v&u3>BycZH_l>iVu& z*w7M(m_73WpCv?5{(zloNv7S0fX*Jh)fGh6GXb`X=FFmT7N6saZ47$NB+QRoO3VF+ 
z+)49Ar~Ii2DZDMQ-E3>P77TWQ*R95q0(D5I=rPRIeE$mW$7=0pUQl^DSf@_XhDscyP#;BY@+qY{#MzY z`;k{upo`PyJ5@X!dFA}+qQz{PMLZ@F#N1!Fb$v?#)lr#FcEG9M_MBQ!#Y*6HL zz-NJw(0TR8J&vHNJNXaxukHl(ar@FuA2teEplRh6_5{QwY}T}tYec)V=W0MfBQ=!K z2s`xw&coU=`prrNX*)Z`G`o+E`6ivYq(g~z8nq%zQ_dLdT^wZ$t}e_yyGgK4 z89ydRudP(AeEX}FeXA=YMS!rW6A|479-yL<-b>C5p4&;i90SJ;$4-lGE@HA9Z4!x% z5#KQ<=AG3uL;D=mT6MD*%mLwBs(0E+eY{)*rIGCJIDP+o6RZXSnYyJ+-1K=Q#{4+y zXcCR{XBXVb@0NX1s>Bp*#XvB`~IJQMZsC1k1DSknAvpV(Ij|owVwH{&y#!< zt2Fm!wqRK_PE5tu9;<*MR9w+pzVUh0JaBC#;m6rtG-&L8djc2>OWs>0-M^j$XX1)( zOw}`GXgr2=$%?J1oE+XTfVtV{^k%f1NKhEyKHtf~Bcp_7FU23Z*LhLqFLtTE`$S=i zn%+J$E_*gf9ywx^KH6VotNmz9;yrC6S$D<0CX8#VlPyO8NC8p8TMKND`esZ%189$y=a68c2kZ&l@@0(B1henI5!dTS>E{g1a7lstHJMTXKr(*jN>!V)$nl`q_#vj~V zcO0RH9|bN?TzXm7Z{E_L1Z&4K1u*&Nnmmv1)k*{gviWx#bRVny8p;ar=e@^MSwr3W zJ-)2OJ|}MZ2lvb9AZuCX|5=OKl%iW(31@SE6Vq#O27XW`iuF%GJ{kE!pn00)6#8vg z2mu}D`QHwb_o;8AY+_@+stHY*q+mkI1L@&E?kn&LLL_D&rta*As#O|o?bS3;xYu_lGBNFI>-0_|dlAYVOBul$ zFBXqJH6o;}2F(gdyZ%;93)n@$&5fZz6gJS|zz2#^C`Al}GR-3=9!JMTWd*mfv+w2$(j6w337?I25?e7Z3i z+4q$_t+=YJ)RAA+H;9k*TX|HaKTyA#Ixc1r25eZ{E5DCwQO)CZj~c*0W{<0HN2RLGTUvL3)M;mRO7=5?Tz z($$y~#G*ooOy8j#7f}p|aRxep-?Rv+0EhXjO0e99#gDqfTQXf(D3Hx8FGE;szv%jI z;;t+Y!1Ir~!H1+wQ!eDlfg@Y;qPJEXA>vPzM@QK0B6RW_{M#5nXLiSYhRire%U9;# zqA4o}*Fx^jLL~X-VV;2*hrc6tO=JD{Fu6nmgXGp=_M1QUf7WTO#4b+7!|sse9K)&& za<-X^5o9cRtr7mzo|XPET-`CsVPweX@(`j*W;>VirXuM-S-dbjSb=zQVYA)J7UgBLth z=#E`V>rZHMZu**H4lIMri+DRYQpM>A6c?j#;*o=2)m$y}92HzQ_lP|=0-2VZuLy`J6yA}ZO( zZU%cWDQXz%oGq-eb7%gylvY)b6H#_ynJ@ZUyy$m7t?v=-Akls6Vi>KIJ*dylHPGECkg&rADu;6b>^8-v`)O)1CpK6s z3CTlam_tju98~A7LD8y~?kERS22skD^B%H}^L_;wh6ZOD8UFShG`L|qdcc+X_F})s z6*TBC^Ph;$8HGI_xQ&g?hNkIK{0wUMwvDtL6GmWIhM9!MJ*b+K+VpNG8Oj%RJRrlr zD-`v!AXF0@oQAE{#wVa9E^H=GrEt30s+j?znNA-4)y%&gbP}jf%Wn}J;KEt?7}1Pb zs8qp}mvNZ&;QmI3%4dt+-+0l!ADJS&!5P)0g#p(q2x=3uJx)TcXQiGNy)=W-YN5_F zT>ge|eWjc!)Eg?&v$eUFL3_P8>+=K+eq`9lZgDt7GW>zXxTok*l1?qf>qBO-%Nr$N zHyX&?V86|T^VVg`1icar*oRo^A4U7c9PEa2G%(5(D5ihbC%OdE>7Ld`z_zaeQ9?6! zYypnqkyM+D>DOW2NG%WF3q*_FXJwto}Vl8)kqzsn&1a$$69s@e#{%89dc2 zeJea=&tNts#%honpB+11B-ug(;w~dIh4F6-$4J{ zXO-@23Uiy@{f8f0fh^|V{Yer)KQ}48+x|8)4zV12E_s@wOjvbKZ{$$QvRn*KuWKH6 z7Tq;ERNmvo@Hu_d9c$j=)U})B4^H-d^{8||ZfdW$BPd5@owFH3(evMfVsv@&B~3m& zke9soEAR?$mugA}YzBvXkDo6P$}Vcxq;{K+AbbnI8I=+%7nz*V$2FDop8|^Dgci$6 z#^Tz;y3A%a=A(6AIR5Jg_QPXWM>juyVW7l5dv!h5_p`Iz%F`tDTgM~6SaI;mjE)Im*};V}Rk&$Ms-#m?93&$*|(qhT-sbj9_R z+7}S~ZnVKL)Zw=e&!&fF(~`4MKpWI;a$|I$&QiYzNUD%Md0THgWez=pr9O{1<8^ml zyKxVMHWG#L0=@4|EyRG9o$HF;K=9wP9p|QGZ(~3`Vs_J>xA^!+)zBd7_bU;mQwoX2>p{(DIQW%^s)KEucq}D4 zp+@@Ejn}317Y(*2>f?F56Ot_Rj8oYIff}|fZE^#DP$UK5S^3Vn-R^ihQ;VIE) z1eV@L&;3+m3$J@`5tk#I8@@hXF#!EPeV`cu&*OSV6|JVct-fHs2s>$GF}(&%LpQOGmrKq?Q^lroWiNopA19 zZQ!R*49Fxj%9$k~vp`_#-^21jj*7&7_!7AIqm7a~>+%r!CQo=9rm zjy`JM+L|`BgU5zci=TUrmh2|m;@iXhjK#H6hkQ-b->ZJNx_vmN*!Fin(clvLp+9;F z_WRQHt-pc)Es~`>#GYg)CcAMGe#N?zP=t>2&wd?Jf%WlY{VL{nS?l!eJqEfXI|Kw= zmT2OGT~(K1P9k7^?4=J*&~9C=iU^(SSFQZ=FSZwtwZA(-b21SlUd%uE6}sk*e0kWj zW&jq;w49&nKYLvHyr7e-_*S**{Os$1lHin&&PExxPKZ~&T9f6HEDZO3*zk(6Zd!?h za-{e3cOJW~5#9`Z`lr^tU+bU15$Ia?ot{sPp>t6S>pC4x)Ep{$u{$bKox317^Qq)k zwXC4iK)>*>f0LZg&8(om(>@$8pp}!%Bmd>`c8fQo_21-A?>;}tg+zp!9Fn)$U)%b3 zf0K(>Gw}1xole_RwRU&iJH(s(MYmR4rds`X#KIIO*?#zx&H2<3D+$z< zpS4zKvjtJs-iwyKdv5H-!nt1peDCeDvO`nmap}3wvWj*(eycecI)~Ml|LNgqZMof& zm9X)!3E89l8aIzG@d7xCdY&*j{g>~D*8Rx68$>ieTK>>+9%S1wV+*!b**%;zpz6(z z-|@aztBT97u6XV#!&bsZZ-D~V@F=5VDb-OA_QB%b4$f@kU7QV$c+BAs+&W z4Wb_Yw1^BD!GE2V1e!0Nm2Iyf|gGU{FsP@$=)0zl-x}5ahAl#z|e0nGJA<)z^MCsvLG=+OU z39iwBp;UWX%hv(2D}>4k_*kg~!<(f%DZdMphUg`)RxL!@2lGdOr(6Ea(c2~X1>6Z? 
z7X)=g$!qM_1_UU?bi%N@*Y75Pa2}p$Nx)*zs?@pdH7Kxk=YOce<$BO6?wvpvJPG_6 zx#TWO@u~B@P}|6Rt#4H{W0Xy6pO7Zzxsy{5(SRkr4q1v%LrpQig@20Ep*yu(eO;ZR z)8b6vE`$CV#SVp{Cj5Kt@Te8fzovfNN7Ug%k+)_+L1e*romRTtoTRhwH$=yWy_myX z(dDUmE^G?bJf~y)P!FyuZl!92p&Ip2s%v`am~|3*H`QDWiKQNWUm6h^)!XZh_9?iG zShcpgf6--uV#Zj-mzRSaBJt-)I5Rbo3=nmWZ+PmVUu)=a>H(F%@~X$14$9|ax!B+p zOqtQ414h!Nln$IuTy+Nm?H1kZr^?-F>bO>=a)jpiMISE|MElfTQwk0qWu%~F5z zJSgxLJ)zod)N0CjJ{X<1j(6Z`2Mz^Y|1kovFnPNpvS)1l2QO$=g8?Xm&f^pR!HOSX zD=4(8GIlA`Ja$72D* zN~+39!489!%Kt{R?+`d?2_(2*lF0!)ma#F$%slJi`wuZ;HIhG?mT6c*p+G(OpXNgA3eW+!ODrH ze*W`ePuFeqw4Yn`s(Mr~Eko8g+)f-j6$m(SbIBZF>{<(7Q~ycg8fRKEWRkgNnGNqSpKA)M z<=qd1iAOoYdc;lub)l-{Xpht&Ol#gdr!fqW7K}1D@2#wW!$2r(7_z{tOm1Hm0>6J# zyDAdCTaPuV^B_TtO>8gS3#^`{h(h{b;Dqv8+ow^gnMr}uQptGO5tW;AM!E<7Wc8-n zG?d6S`PP|6ywd)#(cmg|(4gt>cxvK{wh^B&+oAfz8Av||f^vIVPN2g2wP;4$JH0Mu z2>)!eo4aJicvgImg|!vxHipi=xeRO}B;_7lJETd@lcy6`0|s+gt@0U5!3e9Ii4L)W z`C{4=-2ElsWL&yKa)LT=TW##I^q&Av2fMi7c)RHwRVB-C#;5(=Uzc7NoDqi?ZY=uF zzL^fYig^LAmgv=7X_E&?P^1DK;~KGKv)0>{5d*nXP{%k$fc2Gua41i@mB>3(Yp;TP z=e*g~)Gh!p*6myP0yu$2(^pv91M*nTVg98FlAW?+GV=&Xo}T0|hW>6y?xh{v?ZFS=^Zi(#c9w;VW%xA=>77^CsY#}&5&BeouSZl$c^VG zPt(ufVZN{}X9V^j_M(okCvLl6%@e`_r}D!Zb^Wtj)Ad=nUWC2g14q4!@#az=lQHUp zv%7xy?5=k<4ej`q+%G8VwD8^88Tv!3vOt}U|I_73X2XAZc5W=~T=~S2h}_pWNs1^h znjJY#f=SJvNF;@oL~r@5-#}$~XQop!olQ^6bNw>!i_Q__&t)$30i-y7|12EyXM7rB>%fuL7;6Wmh1cbY45M2tAhK8d(o{s@&9m;$<2xSR#GZu zQ9~2Tt^bqaY2o6~Md$TdakR=ZK z;&#veD&#(KE1-qFqIEI%#YXTwbr!INl(Nj?g%H=+5C<_|075k7i%v5&D7!nV8^e-_ zo-p6&)xzd}j-BLGPBzP_rcY6p?+H9GWTw{|3bFaSSrNe{@~Lf60k zMsp~jlNrc;oKwH;`9ng+Q^~f7KYZ&M(7Xr1A2D7l`<|!MIFCNZPMhlxLs=sedgQez$$HchkuHO@GkeGrK%`4cB;8;?m9+&9f65NWB3^yHci~n8B9;`!+CW}e+t2uE7V3& z2F1t)zD&|Is8Nw-Y~z1h;u?s0=x%9aEi~}^aKw?`EtmwsK$heZrOw`#@F8TJ^?-P1 zM|MLl#`?egUU+cF$6sQPit@5JhzI9dJtRV!k2Tl>qv{wh8(L5Y!hY;mfTg~g!->1n z%*T%{siSm^6ZagJozC96eJFH2a5=B%hS?}6;L*_jiU1@O{Ev0YJcHBRP|&E%875#b zAjn&a4PI>*+;zjO)Z}07h}Rzu2l4aJs<=eU3PeRPwe2g~NwySUjbDQ@PDjnOKF84R zU8Bzk#(R@sGeBlqSS3NQc3?F~>jx)5xM7vu{JzpPzQ}A5uh26XzU@XBogDWSq7s`T z-Bx-ErXg0Qa?)%Kx-?v2*7Nf>{{XXPePg`#BlLrXjO)A6`rW=57%5vxsMzRyI{?Qm z|Hh^)Wgqb^L3lD87{h;ymK7Fn1^Ub#p9e;BW}>G775b?XvNGTcV$0I{9d8^|ZnvO{ znh@g?a7!4qpq93rds7QFDurh#54OQOgF57%3%L?^Tp%>YD2d1)J3S87%;c5C`fqXp z1zBj&W8PS&9b7wt8*6G(8chN+vV>G8`&iAhH5Cp@x_!BH!+U22IAX%`YY^tUb7-WVsp+Q6HNZA2#k`{KI%kI zqs#4!_I-%bUOpRJ#lw{W8=O=dv{A@Y|8cNwp&Li<2RFyp`46?mGMT&@2I~2ykx#aD z6Oo;J<2HNF33epHgLJg_XFem!Y2^l=AL8yP`pXy6q%wbz)!9Yp_~pr z^3EbGx`-5uT@%ffVevl1Gn>j5u$LzoE|zM1|728M<|6X-lLcxl&)rGC;7OF(p?Vut z;A0Q(ddq)`&VFa`)gY6XaM-KQ`VA4Mh$Z2H{U<+s%t@v~wJCjH>d6~CR2GcaiehBH zXwGuppbhU3ugnar8hX$u=yvHeTE#1Asb5e_Z@gCqY2$lqWKk2^otxyh4UpGyX0B)w zB4G@qOuG}y4EI47xRR4Lz`sB?viOgi_@EA{>`+SwS0+7eK;pusgLeucbe^4?RPq!L4E| zu6)na0m8kdc+l>v$hgw#uEIHUsaJ^Saje*IH>>+vE4E-l_H*d(I41mXiYtZQz5- zlyDTn?ps<$q`@}*5*s6O=_R|g$v0}-lAYodFZ!^o1$AW9A|oFM8~PV9R#`5V_13Lpkg7h{@`;%c9(#TQ8JYG zpw;e1xXxR9_suQ^WQv_#%i}zZ{|!PzjRwlxwU>AQ8C?bO<^{is(_6si@$ZM`HgNI6 ztWZCwtiGR4k)xHAwXqG3R(adh-0i8B)Uvc1tN0 zQ&@ZfeQ^B;f9@gmpO6pY{o9Up&^tnXo)&b&famy!!ww;9Pb<%(#p;3Kvct~C+06WI z=Kh4A_Wr!(CZf<>98!+`ST*E+Wi|Oj*=P#BEAqOrl1aVP?8|*$tvxj@b~rqm8B$Xo zH{h8D7+Sh7{7@`I`CJqXe50r{*=_ak>20QbtHuqK+}CA(il}3EX(?f~WOUdnD%rh3 z)xgPg!PymQ3#q4LUG@pQpJU)7qc;9u;7$uedOxufhQUC#5w&@>@=Nwr%cQn zTob^Wjlq>qhr`ELR*6Xus+8AH+AsN1DKISE1mEngX|h>M^lr|MsABbx^% z)w=nKh#hWP5Hil36fp9VEG5Tg=9Ci&UAHA^abo-InDsFzyG{4+-V2%Ad7$JFXFhBY z@<{K}Dg-Tte!(7Rzz=Lg>%XdzS{>Bf{(rrBDh42<$|z4F5RWWy{ zqsk(N8U6sltYjfHo_Do$pcyY#t`{wN?lhLxtUS2ydZk?=FV6fmLD#~?7K7*LMTvBB z=l$7PC5*DEA06)PoxkBrFYui>P4-T{*rnt>tg4FfVjCr6>&agq#tPUp8}}c-SCL(x 
zE*BQY(pI{-W>R|Q$1^nlY~zzRDHCEhuVZb!LAt84e6SoY95;pRkIJ=I!2I>CVlnOu z8!ZRl4g{I#^8Innl1>p-5eF#&NelfpzkBCKFmeVe>PP*c})4pbyw*yk}v_Sk+IBt_gn*6;JwbsiZIf#uO;f4nB1;?X}JVT zoBLzyp+bw8TFh>oRuAF;1(rqdYS)uM6J=s5QdxsB)7$FqW>2f{Ceu=HYY@>@GNeI| zO_?Pd;rTr>9P0uYmy5j`#XZexu{~E_8YiZhkXSNs2HnWgK2W^Cah1i{u|(oO@Eom@ zBgYoRc^=>|rm)!$?Fw=~u_-?^=Sh%&AICw0#Y z8%{yWnS_^p!SkrzZdKi=u{9vLE8N$@V9ZrHD|o9$i}Xm+{xncM6r@J~5h6&wFQ3|p zrB&rZla=OHg)c0piJGg#4h2ngJvtb#IWyeg zAUL6e$|Mmk6S?=y&_ra69Lswn21B;hex;oaC~J!+$TX@bsK4%)*4-(zmDOQP9YZC%`L^~ z#;5}=zn0=Nj%UDNh^%3LR_?rnueM;}<`zN$5j${mbJX+z{y$p-J5=Y^M;IZB>Fvh7 zfCGNA8HGV9$L^y;v4$q(-_nBT#2W-m5RS0IK?bJgMr|7slvJnaZQ{dAh^@ z^WR6`(VsGYHhAuz&KnBAsFamuirEjFb`8s%Bq(?SiIbvF_r`rbag8i+O3dgM_e|bX zy{l??rp4MptaH+b`j;Z#Wt+FwhvJtNMN2D{WXdAC!^4#%KC6IbM9Stzl`+LT3D?Q* z>I?2opFdrJ?Y6;67|VUZHh#N1N&ZyO$qI!j+E39o-XLk~N`Sgpn|ryql|0LRo#7;g z^cN!o`At!33rbbp;nxhoSHs!0DC37J72%pM5Kl@iXHCfC+GHOVdV%P%mbkuaN$(y}l7KtmKEVA)8I&=I&mH^%U=bD)|Mo?P(u%!eLM3&bs~7u|wf=T_ zm9JJcrug+ldVJQ^G%g^VWy84%&8;~%ZW|h$%E&z@foN?Kbn>yh;2iSZdt5UTVAzw- zAOhYx;JMaB&b)k7yGn?Qa!1NLx!+S3xq=&>$r34$jrSP; zA4%UD*5sM4`-TKQP)JbHX`%^Wp$2S6IgKVjAn2Bp8V=euU6!%5unM@;62dVE1gJX0 zis9H+nHb_g1Ut(^JG8=7H0Vg69SH(kte~bQ8W0f?5JhzMo!Nh;*EPK?n0(*+KF@vB zf2+0Vf2X7(H&ME2$%^lmuBkuNet02e*lnd{d7Z4TYuq7t<)*UMw`kyIiN71O7 z4xN>_2?PS(;PdA=zYu9S5yi_G!DxemaoNrCUIUn3)^-p#=a2~TdI!p>X{FOiFA@{} ziR5@V=W?4-%G0aImzK?nX9{(WrdCi5M{vmu33$miy91gv)R?4_YY!_|g|T*r+O>^^ z&s#V;2lWz$pN&Yr;LUp@%4-{>?RWg;ePGg^ol3h-$Aanat~0yd8NV_~g)(hpt>(C-kfZ_tnsJ(X_|)Q}r0KhR%I2Do>8G zG?LGKZhzg(Y{4-v5G}M8m{Ra}USZ_iuefA+It|VF!C$C}xKWHL7$gT3PcvzXL0$&( zUNVGi&_@2T#v}eDj?`3*Ixa-6 zamTK)H0~qN6VCh`oER2}{FoOjw(<=kdvim{)-^rCC9Iim{oTrYooNu1k9>CdU~|Z}nql&ZE?SsjP6K z)++f+0y6abj&9p2Kf?2DDLWcj*@Vl0qq|UcMJ+L7OdkrbN({5`$7w8|mxGoIpW5RN zJcA;V!ybOEbn#A~!?7#a*(=UuSg0p92yURl7C+6}mEm1k4u3XN{MyfYR*}q`0&Xo=&11I^ zF|3vmz8`m8_V2S$dG-hwM#?+`K@y|gW?LHswlbBMG}9<0CV>xN)jqT9^TovGrkxWQ zygHVb*Dj(JIQl+zOTwY+$V@yjq^!^$6XK1lC^XUQ8QNQ>vuT09?>uPsfV+(x?o#54@SiPY+t>x+#$E7h&qBUQx7Wm=P*YFQ?NIFRE@ zYwBsx*fg6)8j0kK{kKKNvspj)Qf}axKIe`aCc9C^KISa&X+@A-jtjt|Q%&`}XN4Rn z4@pgtz2t^tZ8BiGnjD>PS+B6E%ynOZRvj<{zcX zqc8H#^|gwX37!{WX8E<#xW~`vlOJV*RpN?jeB&UI3GRh(;z!GlNmIWW zKK2AE8kZi{M6^A#UComEJ&u7%!|UUonmOb^A}$Y3>ZHJurZQ^~?b-Kca2K{#_GFKUG5N9!;IiinFMc7%GKp8(s%$0i6a6 zQFX&5Kf2%?Zhck?WTCON!G>MN`u*$)ZD3 zt#D1n}HVE!H; zwZckMCTyRH&2g~M3(7uk={kv9b-rI3I3k|- ziDQPKQIW_fmtV-{^5hfA>z?J1h}Sa7%Kg317k4(e)^vtHI1Dju_FGcv;Kun@x6uz5 zjA_@%2GPXm&9Av!@7gBl4BSSj$#|M+yA4cdl-IS1f&7+a=QXkWo6ly_O82FmGz#@Z zzT|TrLu_Jp@KDEf&igqfp-`=_z0=C=3z<6E70V|3_b_Q>X4?qSR8C6>BUKrl0MtL- zR$ILVN}5!+i(m3++b&#^Si&0#wqxh(>)-Y*zo*g;lRWPVap?Vq^#jG}&m0MLD!?gh zLBIc2H^ZD3(KSwcEAYx`Z@E$i0fx9lE^#RX-T>hC!9{UX!et(L!(!wC^_;C~g*TOM7ct8#)R4p7~&byll&84*Ec z}nK2AUW)vBymQ8$27A#YjA3WISw7I}5yYyazgUSFpS(Br%dxBwEg%{Uk% zqwX~(+UG^*oScNcB78tLuj(`F3$32`#&tp?xY|a|F;*5&3;`+H6Yxi1S7bJ8X6~wstx&PM^qSu^c)F1d-UZD* z+Q{968CtABG~od7(?nW-;dFH1FbrQzM2pL|K)lLrc~d$^I`~~#qDeK76GPuLGK)A7 zuyddBI(LS=yMbeaw~Up{~$16}Vu2?>e!d$uv1aTzi1+%e zW*)1IIpI|CPy!~>oZ$KC$yku!BJRU-b*pc&=NwV08!m&1hbYDC&p43zGS!WyXQ=ZL z2Ve8^X@y0n*LYdsM0E6*gA3Nwg==K3tQ(#UX!(o>+B^9pqvx%Ak#{xT$n};=JU4i*dkA>j}n1-pMg6%|<;6029xfUplnL zGO$9je13XLviJ&o)3Vv7V>4CTXGgHbRz41lcZw6xG2!bw3=^@1{g^)1-wJ=jMz0>7 zKA!c5_=ualYJ5&ZUAU)G(p4qF$@{NvBK4yfZP(uzcorl5ex1LKYdWVV;^6ZS{=|>D z{QVBqLES5BqJ!nPLl1q_>vo*(3r|l4$M%QLM+zJLjNdEc`$5rgSqVO72oxK}v&r4WMQTZi7{xLsc!m@#9@W|x!lCsbOzLUAw15np1Lyf|-JvAfj zO-foL*&Nj$n>NH4cwx5GKSHkKe+T+Lqj6ez+Ff&#^%B~;zaCXl2es|ILp~bMP(=4~ zElp{IH3#p=U>U8*uUF07ZQyr4W@5595WAyUT((L$F`O28ln8$Enohe__1~XF2WtS= z_kWbxQ>?NM{KIPD;?Q{Ka4O|nj>J~jzuDX{!6<%v+ 
zk=g_z1`_|=KK;)^ljZu_m<6>0(Z6eUYAmAp@WFC*TP&r-Nc2|DMzw5&U962VH*LAC zTotrhCiEQM_wkQ^J8eR$lS|!3i+7XaliQuY?dD5^G9B<$Gh{uMyT`v=XYkP8MZU%s zRgJS*Thy~RJYB8rn3lMS`hI#^oLIPz8PQI1Tx|T)K|wO?&D&wX@6h)=UTQrX3zf-Y zVzyL>Uuf!$9Cb-fVb2kPCJmEIi1=TmPd}-Fy%VbR#$*Rg0gTKTgn3ol%}=TOKxRwZ zFV?F-;Zq+g0%(1J>6JmUC^$G6rMoiFa_iV@EiKg5Y7;~{lu$#n7;y>LS>6e0Gvu;xr?BAL!jqX_KxA2q4&iW^Ubd)^fi;OCBmC3) z5Yl`g?A3tCC;U?-lPM02PQW&>Ong{8gR21eGM&y{n26bF1U}5#Ee7D{R1|(+m4RIT z;M(e|A;K{}~wH08^yerX& zqEG5jvUW2@NJAF9`JMi){^m+mE^uE{ zI4nSWVs@%>0Uz{iK?Jm0xZfUUZwr3G^*2eKd~&|kqKtlQPS4Z1WV=83R`pCS-SzT+ zG6eiKht&VgXUf!p*M#dTq&Jgkq7d4;YN=E94to6sq3>s?LJ1(V#@$8XwN1a;k3AW8 zss>zt$o}3$R;f@&SPitMuQjWdnQ7IK+r|yz^wd z({>8u1n)gw)79{0iB4~8suR%^^)k-8_HpK?x(u0VAC{Z6HaeUPgMPkG1J!>~vYi~W z#k1SG!{oK2Jh{Y;@+D!5is8ybJ_O#^0R-8LxW*s+|bQXe6#OV&qmv+umJ~ugG~$;Gb=~ofjOASIvipxA%2lb*o;^IX^fjA z@=^Q$BXWB>vuV?AvRT%DU*7MyrYCx1rB*fu^ti}6JJokxaL`p5hkm1VM=xJ}Iat7W zCIelP$17e@Ih>?UfKKXmf{33LAl<0ZE&=Y_zYwm~4u6p@(QPHbEyZeI^e_@EL>K!xH8M-Q80pl9C8ef(EXPT#-#OPj2v@KUGPmTH2J_q^guoK zdh2@0e^FwLeGFgAJ@$d0)TPI8Opf*h;n4EFQ{iGfSVcBRq_{(TuroWynWxra!>)Xg zLY(QXpKQ}^MARdC9O!iF^N99-iBM?5A}xIH#A`{P?OEGT)~U}Jang@D+*KIwmw>0RG#1?vlgRrgd}58BRWL5uU$LNM2X@FsBP z_Q`rOyC?cf+K9Dme%y3fI!qkmI1}?NCUGHb5r)(2jCwxR$_Jb?@}%j__ZxhnM`Cu& z>(Vs#nudvf#8p#!Ny@x6Z8BDdCy*Ib6p~RXArURfpgFr{aoj|q{EM*mO2=TbjJ3WR zbjPRJ$(uFC(>S8#L#>dtmKj5s$JxZ+));CJ9uUX&g`n`Y$UMv;~UH(FJq48voBG{;}^lO`XdA=4>g zw~I~0SVj^ir?-usA#PKu{1)J^zF2kOk_hP(8o&T@38t!e;)mh`W{W&t6rP78c6(`V z0Uir>8Rn-sDm=fX4;!O}e_fz^(+(Z--aQHbB4^Q!7^r!bG>t1vcx}T|8-6?`Q1ME4 zrMl7qUPAsmpRz)Qu8yIR+bY*$90XIN)zZ1jniA}SWgB`_R(ocQ_^_}A(AA(CXN{8H zJ>QH(Ys05bWua0dtY!3me`#>{b1YC*Vfb|1Gx)-guGg3mH+i;uzG;R{gomKOyAPEs z@q=d7$91&>uEoImAG}OXJC}&|*uEC8BhXTCr?1f`m6>G908y1T4BsMM$kx>aF@v#e zy7=%)6iL;a31qp2ceHayaHXf)8pVD9DO%&V7oc{Eic%$mdLsE#^M6s&UjJ%5<;Hj_ zM>Z5)@cEtwR~y_ZME)qdBuo4hQ_zCB%5D(zew~$YkoKaH#P8We683cH9LE*Ql4kwc zh*t`bZ_^&%qnMGn^>uT-6;Nt?Cw_@%Yv8%gepNT~J6ls7BU&%OxqjBAC!&Ei4)CmW zqb;0iP`BT-K6PT@)6gJWH7FLJ-lrv(LpKXKI%-3THbL*#=UX|DQ7d6CfA;k9&g6A& zMut1<42i7@b>+=A_r7wB&JOHG z^a$}fw>zc8qQMhJJ~=}9465>cHB}_3Ud=kgu#yiw|B;ICYC}TQP<_G&w+qEF2V@dw z_jFmEe=L;+RY;WCQaN#r6PZhcc!=&mMb+Y3-bg>#6_81jnB9I6_fBn7t?0FZX`;B) zgT))|w)gE>Uw%EZ$`NrEn|YBq5`TK@dvSF_ijLT!BeZO3+|;&;fM5;kHs}x5BocwJp{bY^A;JC(P7+Wz?FO$?XEX9r~8n(NObfg-PiGWkw$AG|N=qj6-;G^xQqtRhpx$?i z4jo)kURhWCP`RS|+~=jkU(9Hp2Ex!2=sUY4n$xB37eIjO8mh?`Kz}fM$UD)2-~8hD z?`LOcZE{+!I)o#!GXJsmNE_SLYChVmhjI7^g{y)!_%(;?o1*}8a13?#X|a-b%dwz& zX3|!#KL6LRj2vF^{HgO}x8D69revy$9BJ$S;7D8Bt#m*Wi;0YnqTJdm!ZN-O=4mvz z>s^I-C*27bTBfHRjDodl3i6HpmUG~q$~3Sg-KZU9DDaCMqhI(np1L5Ok+^hEH11TL zxX8Ssr1j*4B^TTAggnz}{p2t}a!<}&uehz7f`1*((xpsskOIfAEGlw63n*3sojZBm zXT>du%3PnxCdwt=sTOS}nO4dstHj0SaHp5|6eq6?G~wiuErmBqf=xdGJs7RNdDy4O zNXPiR|A3LYS&p%nhoB`)2f`Kv964IZ75(L%lTX)s1HPUMkz(dYoMN>9yf44x$Q zSkeSKu}YqR`0#ANT3vQxZ6U%i=O%G3VrP&F_54tbck~NB7xuycLbEk~{44$6Mz`Wp zCuKAX$cyQEozC{X*VnkznZHaxNRK96;FBgKlGKj2$|PnR0uuo9{P?}Bj( zDg*hb4m+stdU2cW>|H-1@K)SFo+;2~*(NiiP|s%3wR2^$!wPO;QUG450K8 zP2%clZuPY%Ywg*Vzc^@rTFp+f@{zL%>@Q|PBGW|1z1gd|)z7#(#=i8_!_%=e6Qvn& zn7Z9EzqV#(;qRFH3QZ3r{j_lwVnTZ&p0-Z$10=xpTy_Q0IgGsbYOlq9Bkv!$6Z5OY z;~lHN?}`zhuK3!vH7V)Ll504bKb$j0HP-k&`gs=MK!w(Ma;UM!fdwR3vaC($7)9MY z+RP#9GE)e-w7d3jkNEEc?8CfVEFf#gGR|LL!>FvFE}y~KYpd?B+2F0!Uj9|GjO}Fo z0!%v<{>_h(ItV zue`oz9d zc-rF*aZcuY$^^dX9i{8Ru9uOAcvHXr(Gb204R5N-f!W(NX_*Y1>S)?f$kcOXT|@Q| z`kbH`&yLA$1Lo)8khb)1S}|c{w{p5I_9yTo#(v>7E`osKjQCbhARjl4oz0X+CqR`X zOqLG~6ts-xboNnDvGbjOtv*|LjbUjOck-d1>7n!?n|6+(0z9B&JX98I?0osRk$05L zY?kLO@#&Ua>H*Z@?lV90@_YPQcOP_wTY!!FPTB718L3~o#=ZuYeDJ!u(`ea|H2WJ< 
z#A_VkUtZ1gIVZnPO|SfL-b$Cy?p3y1RM|yF#d6OhUSAdJ7+%F^LtoM3IWNKJRf5|4 zr=#eNyR%5&H>nJU7HEU_d?Ix)bugDNG42VLkUmM&o}z^oLG6{ggxw@7tvl(0mNj>( zuRl6piTlLaYV&^jHjmlSc5Kdv?%5@ixR!3p;G!AtWNNIH1`)uAa~6^+8$3QFn#SHs z8zO_xeX5Pv=I%$X+O$%H;95&%^|`&D`zW#QJ5F|c9pzOYEWk=D?wf)`2s}$ZoT6AL zB`*7LUN=SIAPnq|Xl)FhrH?=znxUaZO9Q~8*^j4#S4&F`!5S zc}~2``$d1TznGJ^5*?I^nb;`m9w6Ca#auI_7!K-VB$>$7fPy~mQf{7^j+@jhBPnVn z^Tdu2yI@;m;iV=Q;BGCNj0vvy&9mfhm)X{vekYZztkJ~e@~?K zK8Xxv@dI(9rty2SGHIglIemAx9e((qX~|qK8)f1zlT}j|_;zh=EzfZ^bqRgF4i4tx z*nfjUlrgI&eSlWEw7DkF;&p-ut)99Y)qiffsa!oU-wZGucJBEW1H}abYPM#C97n(= z*Dz7#q%eP~bk9~Gyy08=jLtzN>&5jUG$}&s;vgIAp40o=?wE^H8WWw8hJ$V7QY5)tTY*2LK<)n);DGNL*EF zXI(kosL_*p8z5YV)MX#El?J|FH}t}QT3$T|(mz`bC~cr5E4Ig3 z#71J}vcOS#2;s?;p^5=wQKOoR$BcQVUxa=+N@E*rn9&}O7$!qC#hL=WxB3@0$)#qy z*=W*if;#N8KV0_fDNzijuFN55;8Cc{POL#dA4&Mc(4UEk0}?ptv1@*xN?p8b5e>~g zcT&IZXU!Yj&-2C)mZ&qa9<4rzV}JJpR|S%N;%isX#*PaHw9Y!0516d$^S29^e%$p_ zXHS4r$+&|gVqdzf-!ZnC683qWA(5%Ro*mYbte*m$?k3FQXR=9XWq*l(x0D&I0rEh| zC#u|Q-nOQnfe*OHNE6Z;r2#Af@)Z{spB4}7dYh+ZmEl8?4K>AYw1mlq@JWu&9&DlC zYD`+lL7ngbn2rEyZwj-pAF*F2zKM7uk4kZf*`A9OJUSyE0}O-rIt*_3r$*s83+rAz zr=M7#!#bRhErV(7?&Xbz(}3`%xZdz2%`Q#Is;^!`ty=vEs^I2+Jnv$oQ{@smlNrF2 zY33s1qaSqciA9vS)0i4z$h(u7W`(_0C)$Nejj}TsnpYkY=b&BlaKm$i23zaD$R4Fz zvw-BW-JOCY5ta~=$3Rv4#&aS%g>DgAk3v=z*hjwr_?uq3DVsXkVs=ey#VyNUO!a8o zGV7{Iox@GpCwdZ=IQ9U*lcqu{;?3#IX*x&zeS1XX!c!W{fr#c10$-NTNXXsTo-t;N z)`-Vv#8;c(Yw6l_n)en&PtHXaU+au|_=swXeh#1~-MIYbag~`F6bx6p)4Sir2)P9A zOP)hOEB18aqo7xWS@atc{w$!w>m<JgTG4FBkPnsM`h7E(0*1D%u zCkI$F&E=}H#Wq5kQi_uXo(kc-y&aSA&sB)PfRtAnD*#8l#_Wplo(=r-aDSa+%S>m% zVe{THC-depVbDxn{BnEU$+!K*gIIJUD%5vqIy!n4ENEOJ#9Sc^(Y-i_oj~C~N>SJvCJTu#pXb|G~mzQO`A>V^m*(Md0r@>}t<^ zpNY5uOjE~9h}n1MaD2{%5cjghD>XUTxH^ny{J0>~9rCYHL+EEE)337%jXoFP7g*r( zvv8p;?~(U5f1^eygoCF@PZrP^?@-k5RC3zIrqekZ!>bih*9d1Z`MEt@+`x&n?)+2l zc&HViRF2|V7VfFG_w%c7)_65kV@aM{T+s7h=on`8OC9TqlKo~+gM1R5UGV2u?+vU) z9)n=t^OJ>rONWS`cUe5m2X&03z&?mrATGxzVus#CZs6WmUrd_;2{&?DZyEp;fDCL@ zAkacR1{PUT$w6xt_4sI8w8#>hb;i)s4S2yk|-FY%}0RRX36E!q+%5PsrwLyU) z-Xdt)8%BrG!PvzI(qQDStses*Rzmc`H=f6I^s})`ncg-LphGy&7|$_lw;7Do2)koE zX3rd{THf?^W2hHin97>4#yvJ57AQAo(C$UXlazXucCP6t;Y19#JlG{Omd+86N(e#_ zR2GzJ5`(eK&jB;cIbgYQhm&N>=tvh?_b<{O%MbmmCP&gs@6=Ig2*TmfE6eS`u(wUf_n*<^Vq8HW-xaIX z)jEHAv!gFl#PbZ6uwwtnrBt)(vEf%yJ}Qg78}m^W?_gDsQ218RiK|5dUmA$J8V8Dz zA#82qSVWlMuA-4clU`M}=ZIPZUhA=waNUnT4*k)%rKbogF8X;iCo>g(j74m8#j5l? 
zwxh4o*_Z-Q($hIPb96);r6YusnkPQAc8mmT-n5fn4fF5CZdP|;L-|7oqkb%#wL38& zhsA~64+b*#khg4?f9`&$^w|3>yEF11#VGb949f(l6v>`|;1|hhM}Ofx1#ZT9w0WlT z`uYGS@w}pRM-9my**5J9dlKX~9pYV<@5#!Pg?aeC9GrVZf_f>J`g{){91n5me2d7J zR{sCRamy&5S@rG#kJ zy7hbrpIdZToaF&^8ndf%SzSS@{7uLwv1Wgc8P=E+=Pod&jF+;+i;Fi4V*8mG^FHBBs?;4s;!uCf(z=(3U)a8s6$bBQn{+mjiAM5re(4x<^^m=!tlm0%vNaX8oJq#Q^?D|PRIc;t@=%< zC{D@qsrtBqc^u&E55>v({ef8MHS?AUWGF-D&!@s6{Us%r8q$Wmb|UH~K^Aup2Q+`8 zq{9?Xw)0l)K>z?9&)cCjyj(VE*7GkTGFdyEYvH}BB1oY=O0S1isN`Sd>%9^gM8c!r zvb%yu0Ug#pN!TsDpfVzSzyXuVnY97ILpW|dsd({aOzf6#C+?R2 z%=${XvY}cEVycg;_fq2vFUcyVR}m>w>IVmc>87d{SqUzhM&ZAqaJHrCpRy>^7Let^ zhQ5en_5x|Ci@CI+>supm_Sxt#W;-ZP_u}*chjL>|i9fZ+k5)XtD3btI(hh5AFqM~< z@sbR}1%MG8D{H33U=l-zQtb7#1^*8Gi6RX=dF9}jUuSiDzp88`LZM}*xf;uatJO6b zON?tR>c9|{hud4UI5<;}lx^~d&X?b1+V$|Xo>F zJF|)6`dV}C*Y|;vIPvL#vR{n*|Dtra752oGTG&VH{`0$!w5VF3M@rxBV1FSW6-U7q zOxtgetCy10g>+BAv(t1nRGTGPK{VeVuzI=F$m)cX>bc{S>cD|dmF_iH31SqZ>6u1M zae}C43*7sS*)A4F?i>Q}Rv)(1R~val@h6UUT$|03 zHvBozC<9zAae2nu_wpm2)^_*H!w@(;Lg}b;OiBgNq|W{i4zm)&uwiEUx*mwo-mlSe zDmL1@en(sjM5c(BZ+49xy;%Qdg6~H=@=zE))6IC;D7hOHk?KrGO4vJ}IsPGPX>r%* zKOP9mj!d|s^m%t%YEy$}w*N6`w`dzJw{EC5I(G}jnS|$luaY9U0?AfD|DHN`4ni3ovo(bGsnXR=_p~HBG}$#> zUMpEXSb5dKe#(uW;TUaIgl1m!{7Zc$He&TJI_su?cZ=LcZI}rDJXrM*{Z6Ep8ZOu92(xK>m6QcC!gx+-soGKzs9J%0p+|O4oS9AY zX+k}y*A=yBdK^>+LtWZlfN5c+h^`la8x6h%MeaPj#LP<_Y!qFhDQ&8Ls%rZ@^sv>> z{N~JNBt!#2wgV%BW~S!#R>%@PjbeW{WT$E#R(}lt$ljfO0(d(en0`x`^O>OoTZuo{ z#4B4Dg;biJ$TI>VhIp@^)X21*WgbV8x8M{QPX`TsI##Y3pxrwcG2Hd<4v|GK1tkte zxKDQ3aeNj~dWO~yR@VIjtp+a*nF?tFEcUe;Tcea!iZ9i(_y$xF^wIjbrmsOqX1h$I z6L`SETK_2PgAXf}GPoSwEa5jMszLEJ%`|_j1MpGutAyDNW%s5|$SF}+vs+~0Kyde) zc@z6MLe7{+AS-abkAL19tmn58)602159vH4m#*i(9w#DRfl(m_`MNnUo{C3+P^%^C zx28@g+;|nga3T(?fxI(gO{fVz`Xvqbp?U@ z@T;%xiTK2HHf!4x{tVntTz;p zIPbQ%KJ_afd%Ik+vgkjZqYD?NlfS75MgG0EffgkAX9@L={=-=VH&Ul_aV^^*J+XId zqUcdaH^x%l+YiI#5*=b(|sCAVt`yJ zEefGMQ^_XYVD|h&t53M%)5AR1rwYJvc_yLB2W;qb-v!vpdZLRLLONbb|Bc^QRxybk z6bEql)s=GX@DahZVgc|*FGFLUqe7JOo*+d%MC;LXxq7Y=sh zKXm9A?+q&{MPiF5<|Q_{zxu?OEkwyne}++#DNL!>3q)f=OBeAQUGh!M?Y^(#%|C+1 zDC3E%e4_4~k(CqJNAc3I zHlYW$T5(_q!JGc0eQCyT)Q*m!>DgG%T5DXW#%1&3AgrmAcVg$>sbKkhE;R#MaG`E_ zE)7Y1f5QX4w4$Oo;}mP}W|IB3*pK#OobSqny$6ECIrvisBWfDwbWd|SCrn~ceeNx; zX`+6=K89uiRtR^mm}I%(l$!AE>7V?6tc>dx?(yTt`Z9}w(+VHk^;c8@DT_0M z%2jfuun$K^N8fKXOc-xmZbv*7aC-6i|7DJ$eTi{}){z~Nh*H0S2ZyJwLi6f0_3pQo zjfqY(o!PRUz($gpvfgQV9vAJsvvp-4lZM*O<#nxHfl9aUd~K%c8u2FJZ52-pJ4B1?AN574KzIVKMEJK)6~oJ&du{UhklICmK&v^ zg8|`nPOv;xZ;8e@z|0Qtd*d@2LgXq0nbmrLB(5vTYM3# z___Ef6IM{im*<0jT*^ZHBsSt}jFrV8g}4?>T)pO8U=_TTv5Sd_6ajbez;F^d8w$C^ zi8*FD!>~UDX`T&v_A)Y>%4_HCdhU`V*|2jUnZ$9p;P)7=Rj9cxTSv%e)0)x-53TH` za%K5q2^^J8ilismEInb!dxGa%>;=QKwe?lfn5616g#NS{5%Uw<9w#7&Xk4iGu10Uk z zpyt;$5VLzB{!|{LaWmY~jFtZ2tu^+bv(;U24M@mR+ao-KMU14=Tz!9NX)e5q+@&lE7$NPEJzS^FY+X#X0QcQ?L+K5f zg`{Pof(hG&mI{IMv5G{M*A$ub8@N4B{Ak+_p4_&Qii+zIO=1EudYxH+KZrUd0gpa{1YIHdu(m%FrAIUXV;Y{8 zn%L3XPczR#x_CfjJF_-BKpLG%Gt`wH5Lz5Gt55fe9z*7}Cv0{=h1lfZs%D_vNu>Vp z%VQS%0srl~&{_#=RC((^`a|n?ZdDdI)+Ht zOw9U;3E}xm<72<)Lo3PLrLqWWZ-4D=)|fTvzeQ>?>jInB=i@sA^D4W%?#_47k?R5! 
zzX>4$f|Gr({q+(nP`ee~`=AXddZ(U_lAcZqe{bK^x|o|a`wa$ru&cWTkrj7q&i5|6 zb>Jcn$sc}`kfTUWtL^0hQT?y+|7}*^y*u{>aiM$Snpc?1v5#Rvnn68>dwK21%&DV) z?Y7qUe{>Qi^B9Q>!_sudyHET*w`Cdx6!q)=y%P|IL#@*lTOryBX&P`yx6;kf$&i8wUNaM%{UhANJZVKC6*Unsx`nTnJBw z*|QQkaQp<3>3eUi&A^mLfCa{MSM65^meJ{OWv12sUl zRMy>sA-8o{-JY&}@ur=01yRypPT7E)q8=l-&%O&ost8sC*F{*Aja?~5j6?$WzEWB4JcK*$Q~Bv7yh`f3?vv53E;<#QKhAYzhr^O0CeyFD(@j9D{A;?-eOK*CxZL$WHG%kyY z#_%Ts{Bb&W-@(l{e7W&lR9)Dv)3<7kFmrsE zLIRjBr)!8)&fSXx3}x!Z9C7#QfKAN67Y~(gEmrjjMXvtkw?n7xv2xDgX^nv$ytG@N zDp!BC8XDR;y^*(3b_-zBAg!A|JSbD~1{_!?MZz9G(dut&e;iI9AQv^It*SwD9=;Gy zGA4e}%yWv|bPnOJ|JC;-aJ05+dD^-l)_Z}-eT`q8`CT>qEWYs(6k zMMsHzF2s{f?#uy~CoFa={qU;>LL2?R@^0PJTkoK7DayD{F5wU_Qyy^}LSLl*@0RyW zNEzw2sxSXZFaD!=aT&>vSslg5$@X6^*J7t0Yx+BDWHPSl%h-d}HVG|KX+(K^d0p2F z?>wDTrNQ@&FK8lFP88w=WyVh?W9hl!tnG;o0Y3DT0IrtPHBkI8#)QO1MN0h-NoD=P zYCpIYI)}7w3|^}F@Z+syQ>M5#;;Mc_82magdadD#1Rw(M*C-BgdV`Xz*PikY!3)UeFn38rr77zYuR_plPwV9L>7=36G<8Z8-z zRL!+!%=5%G4*K9}{i3DQC1?U!< zC)AG@f@KyX1$PAa;AfRuNOgM`^i&(>2lr^S+MH-3o)-LMae~waDaNoi({wPAh_^E8 zCvdXfFQ2S7BFyNtX(HMg5bAi$$31E^kWNlZ~9lz?vc*!#Jq!azqVa z!uNttROL9wM9-fieW*`A4S`j|W%&2%udU5412iCfToxL-DSQAD2+)9b4TE@xf8QcA z6vD=bNZnyxBx1Bj6=%oBe+OxE^^))fj2lAZg z0-rOCPrk)xmL~Ff)=XEIo>F>V+;96_yfwb){d*gOhZE^vE}$T*Bb+z%b=Knu8tj?vLE@dOjT_P9wVS;986T^-OlRk_0}a6G#Nd1Jv9Y>$5X z@TMt;+bpcsXlrz$Z}6!H@0Ff!&A-l=y3;4*2t9|%kKfj;7LCgWYo3S?PoIH*^wIB6 z3Q>TOrkU^6G0Ybjv!m<|P6#_G(V3BAJ)Q5eLKezOjJW3|hxmYpImDap;;w3p;)Pp5hGT zF#6Ohizati&mG7qf|Ww}vdFmQ+8SYGp2T+YI`MHW?pjm?c`V`|x~%SrI@I^;H+s+qm5T+xA_)0p`(X3Y;^$s7Vq zDb|e3NpTg?Z$(=M((v9CVL(vG4f@YWI=%iDOby4~{%_~J_~`1Q$lPxyvAw2x2EO>L z!Rya<)0A`QiDr)NoKuC%c0UW+nVFrBeqTU_f+Svkba5~SASN3XcBrQ(4gsRaXhfaj zCW?!$|4z+L%j%950AM^?eO;4oY3~40>a{)SS0vL+@gzc{SVe8mkpc_w`WUjLZX7|wo+WJ-6%>op z(J~bh(usK$>y#g%`4gfAD6vRXCL`Bhtuaf!By<>-Thq zIuZd*C2!y{($5Mmj`g)5oO^O&CP;Cy*eK z3WSE`oBY}toMt3)6hw=!Fgpzmemw*{+rzV=^;ED1gz8Jv?od_vi$t119NSDkRkjGn zaZYivy152n_m2zpe7qv0pkLB_K^zMI+IVFAYDxoXXqed}+65>>(_r;1^o_&r!shdK zA!wzJ-SLwR6hP~s8j{o%@WvnxqKB0hQ`$~4`2O-Qmt*oclk`QKHZTT&&fcJ)LIYYiJWFO{-tC0nV_E3z;vj2}5X&9kIIn+%&x!bE7tOU8 zL?XPm@dUdLw!&W4#v31{= z>7y7B`p2I5ysV;8G6WyNV9rz)Ove5~@lsv$pec#@Co|ORr+Od03;hxpkfjg#g)O3& z|KlkK!?NKgCX*%>o;uVw*Y^a_#>LcGWegeuzcDv`4pxsD^NBPuL1YW}00o-sNte~^kK3*?V<4+kh=0&S*V#m%Jm)=DL#LzQNm4$As8LLE5tOJeHM|H3V zf-YYh-;_uzfAOl3oqJ_QjjwuHodkX-Z$g5>74STKms+_?Zhl1dEDlT9emB+m@Hg!T zby~ZdsK1)_30~1l*Dk?5SVQz8kyVpOd+f;5WcVr&fmFc8eblt2(J8oJGLMtT z_t)wWXpNnu)V{gqCX9c4L zaKuGK3r!EA=I!UeppB}w_y1fYX4_dmT5$Qc9^9}rE>KeJ@XKQOWuZ~kfFL_PFS7PJ zTc6@$kT#r%yrN{@yV-ya8j|yoFPLXUGAiknBbB%7^@Pvb^Z!Wt_PC_)J?<}p{?rsS z*$EPmh9|VP67>`aDAY4WD{r;4R-R_lP2FiGU|ujA>Z+|nc)?m3Ax#b2j$O7|y0UOB z(VAlF$;wMCC^S>^mSx$V56|=0+3U2a;qUkTem?K3JQn0EZ!aQk3b}v;RK19Hh!64* z1LLBBXWB6k@1#hC`3-q^q%AMA!g1C^ueXlEW)toE6SoGaArhvGJ4I$}#{-9@8w_-N z0X!pJ+6~*mC~$oX!unPWtWYmHD1aIUIG5s=YntG@rMP?tq0W;I;a>F-Lg_dh)>2K; z2Z(kEI_!xjuZQjEkL2iJ*`WHxgeWE8B||9{{6kr`EF2u6LxX`)hFyS_juZ2(^gy>! 
z9(xZmBYOp!19(ZeScUw`_8~sqYk5sh=GQf_H&eZvq$PHDO_r?3PB_Hn zo8Rk;qY09ETP?*$iU~Zh@AsczKFIc)%96!yk-z|I=c+eX>w2GkT%t1VSb}1o^m^$a z_%6*E5pWJ32kX1{&taSj6bvHr&9}x!fU^pK$^ck%Kn#q0Pn|FSm@q0|3W7VNrpz2NOa{rjpj?AKXs$6?&krCId?P)#-E19Kxe5JwVY-oFyce&WZ4|+ljQ{y2RM~e41o`JT?zd)iD(*C%odU_VP~Az zqpQ9D+blwwl`~X>eFlGNS)sUYy+caqtAa;LLisoI(SHSj1X{0~nF(pGt?S<47=>h?A#@I*$1C*a z?Hh}3J$eeAi_S_;^s=s6y-0!S5o?5u{JLC82=D|ZJIE3*tW(ChG#+W!||OpUb(r8pOyL!hZtSmT8U?Q&V*(UNvs}+@|CBliMLc+l1^py*oNp zOM4y~zx*KXubJrSN-fhbYKDo}1-9)^ZMruXo7-?J#2POA5P{)nOgT55@Jd4c@)g|) zXecoohVkAU(75aJ#0$HSK<3GeAQ9}ujkPCA^BWTSgE>mr7dPLz@;SCdCA=xWb}2s- zsVYVWvgAW)eLpBOW6qwz@!CFHh7Wke(nbPrLBgL~MYo)7(?G}Ieo0oEC4Ex(mX+bfJLZCx*}47^7DoY%Kv=A&?p~!pGFl&1 zA_7%Al0Y#PpI*&JwYpjJ%*7Zfh>oIn*PuAK3wWS=M^O@_xDtp@gewX+L^HG9~d!t)go|J zn&4!(Z0#>n9yEa$PvKIe%`u7KMyf9>HGzH&jKE zQxy+7^(ouMt7>s^-MFL@*Z@v!Wzms`@3@2DaXX#t=dZ93IQH7*}Uh|PKvXZs?{_Cot-5bBqG&k=}@ zYx7FjoMdpklzDtp@G*KN;XPIPLN}WG=#tRtD~YB=`GVxeA*YqFV1pZ3p5H5L{>f|X%C}2*0_S;JOK(COQ}jeg=}BJ%aeolRXqvXNn*H0YF0S2~rSJ&Syb?W&-h*1wH;c!pv`htE z36m2t(l%FQ?9!bt{D*#kEKE#a(Eo)rGJ@QTs8cF0v_vyd<3A$@k$?Q)D;rYpyg287 zGU<^gG(JetaUwUD!=D5HwNyJ-eZM`keeQbH)oQ~nTIvdub|VpTQmK1;KS)+A&uhI} z6n6W_g^l#m6hJ7QV9RpH@}Ndu#J6wTxTB3RVQs^_WwZh))O0j}0Y=?@^7rxc$}1s) zGryfOioca`3yQd41jn2+MGJ&Za5lt#`l|QKhpIRztU>$asD$kB5~b_C#P|oM*x) z@7ag5InQmH-^Ysi!YPp7KKreR!x!R8>jaRs2$PjM%{RI38u|6i2NnEO=8>JlE32lb1 zgY~n=&WMPzwgBf^x!)TL1tHx)6lTkkauzb-s&Af~3)n8e?{7KqaWN^{yKkuMQXite z-o3*+=R4DU469v_k*NWXZe91y7cK_JaHx&59B)RvXn!~EuBEo)LBklsd&kB1k<7Q0 z@ebPlv@w?%YZ-W{OiaJvkI}Bo#6DK?s~_P$=#s}zWKL2sSbp)jUDMh6V174+N75bo zY8*sYFz$((X_kLb`+`9|FoK-CRC0W8Q_8ME7Lr+994(CWs>;^lYd4n zfU)MQi}{QM=~q&fa{tK_WtQB%qW8alp(9i)3>b_^;C#er42@Y-nxb{0UX}D1 zBjTYeSrYF+moz%bA*;O`?Ja3Z@N1B+HU>?QR2esS!87W=%Tn7uXWHoe{WihWj3|N4 z#w2Hm+A&vN-;k|%;Yn$+G%MlyIAR9-8(0z?^}0_rd&V&cg#?HC1CjXCb6O^Ezm)}~ z5(CFn_|*SntW^)LTp9KyB2zr?8QzVq?+&nL^IHT>^Nn7@|0}qR)xM<*Xu`so&4Hou zCUURnS+mgzJVS;8TmoAB;J&6p=4Py~NosCKJr0ow%Igq?IR@zzK*@9Yr8osw2CwGJ zWxAotdl%kviGJ#_eGioigLP&~wy0O@KHYN3N&^zSDeHL5d8DQZv6|k3Uf;Jyp4~lT zzq#|eQKYCc@7N(5JZrXr5%Pwrp|OuF@HfI8HGPW+ztCRwPL-kaJM2P-wdlb&vWP5Q z3xT%P7P|b`)yUu~NphaVPL)yk{q*IE;vPV2=Pc%27|;y@ToM|0t>m^AQS?OaK}^87 z<+J%}xRaQm8r{H?_Sy(nPJ@Ep_47a9DPhYXbH$}(B+cmja(HlJQa!eF5%L+_Odw-g z{)L_;4;eUXxqX|G$d;~Vu&3A#84P7#1YK!&u#r5Dxd#$=QUnWE%osM4)% z5?<8JYj$m=pMpdF7`g2HB))6Tn`TP1;s1pmkwqV3EK8)$sOs3Iy-CXY^Wl`LR9$GF?7qJM<~0y=;cOZzfj(DQ5FEWbS1Aq;8(2 z8P=#^&UtssQsm)A*=&!?ve`PDBw~|FI$H?c`6`9<&~38la)(MbUtczzV<76u)=l=~ zS@YfE0AG!|EOVvv;ODGu@qCHLY`&G%)&zoVJ+`mZix5{k8#LR0?COqq%R$viTM$Ey z;QL?p6m1<@Y+e%qzeQ*Z_-X<{!4+ ze!E4)XM?sQ4>|b4T-~SAr}t_qZnCE)FFsiSuuZw)CY$(JH@#o~Lf>l-xp}(BbAG1x zP;u4|%i-XFfi|ohgHbtO)Pe_|LfQ`L?Ym|ko$%<1+bl2K=E6b-nP2L>?r~V;(K43x z5NEKZ%gG@sA^_v$?#P`%kF0!A>wKPk%J(s3(Wok&NP_;7YBO^$7?ktoeIOi!fsDEE zFlsdQ&yV$#!u0+nPce>lNaahFIA})0=x!irjd1d&QR0{9|L`^ma!e=uXVMBWCW^(n znUS{bh9y}XSH*4tO{0#r_|ED0NZSyoMlHOfrXUxB{|}ZZ9oROm6Uw}%KcybCNC6&^ zqA3{n%?mGZa*u{oEqj_krg-IH2wBjgRo9C_2|YV=$kx79uc^C)&3agB|MsS(47#3k z9t|zmmvEfg%o3=ALdH*U<|5sx#7|Q&MWfV*AFk!1xHHY`Z*mDN2s5tB!}*WR$a*6i zez36rLh7~z?NVi9E6=G|dx);ZSFX9>I9XsJ)976!Wax32A<#xQ8l$xTTb6HQ%NC*Zzn8`6?`yp$DwZ{OH>) z3!!v!MW(8X-%z?S*ZKoUw>F2j4Q?mnlP?Xj?wdAU!0;ky7jU!Ajb}^08A7s!W`jVP z#p;*G(>Hzf%*Oot7>v7OMF-Ywc{_`5iFztJQ>MRVgCkK8;A}jK<`Twac@^jS7nMi9 zZWGCDMMGd4VL;|dI>|I`v-@NfRHd;ow(`EzYc7|hG`3PY&4+0W`RE595*EuVMqXdlzcpDL8T_)&Pas&$n8I0td`z=Y)H%Lw zNG0`K-B>5Xl;3%Fb?f~PTuPUh&1XXK_=geSOL;!{pe|nweG4uj53*lt4pI7^yi~^o zJ-g8BUlan4`2N?G_HEhS@0iE0^jEg#e_*IMrL(I78^^Pe+?=7VKsGLBm~_*3XSoz| z;4`M%lpqZ}UDfg^pnT8iHg`mW#CEQ#hy_{}ju-JU6!SbA^sENfT zOfjvIPp*tgg?8w#&px>DJiR+H5n*mjiQ@7jl!mJD 
ze8E=IWhh~@M2fNM^M`(L3quKGc2$WrBJua$aKx!G2J1y#WO~9ZNI;ZO5+4??T{hq< zzpBCD9m=?$i4)6P2F(D-5^eQB*?R{hjd=Ku)VZ0mZFneM9@cX2^zXBu-P(~-zrI=B zj9tVJjK*tSV}W_F~&eT3bKaZ zF?q~4W??h(k8;^9aB3adJ$@*BrL$p5n%ZXod6^<_dodf`{JJq7)K zKAoyrF9NYjy)T!~(i8|LSIlub$95vuaX^@~m3DRvJ{t6_r`!mNr>RPWf*lEmrw!cH zF<6eEM=)J;k~#Ctf?kvb?K>H#Qw&`-o+GBK>OvB))BSU3>u=`@LHi)n;4mBoT zn3M1RWk)rksqn@fW#8I^`4i`QrPblRg}syaP5d&Z&kCvVt*{c_2Fb%Qm`Wp$=Q`NT znY!NYUSB`SBzgD~X=+EmWB4T-yQCdu&#rliFOL9u^T|-@&Ml4uL_wS^+l4R&>A$Kq zi)M5sjW0z}(+MuA!m!_iz`U=IkuX+$(ZTWQeO6*jrBnT^1xCCBM&@8=U){gZ-Q6Ez z>MbyP^o5ULw%F6#CEO~XNY4%>36=0ar>i?$DUZHem~ml0Vw}Yv1vQ`h$H`FNTj}oY zy*@`(r9ROl><&O6nf`Um8Jg}by6)e5x|NH^$q_SGGQnoLL?A<6OfQFh(Nm%*+(OYh z!0F#}cEbrhtkRw0Rh~$hjlG|l0_B${L>i@n>Y{W+Zb`cM5=`21P4-|{;3&m{G#@HHdnL{`^yjUwzW+1vZw(}zRhu*FIV-FPMPb8BxNJ<; z*^Vb<6h&J*DJ8ng!L;2#`kr*i!H*tJhjaz4p4&A^{qTZ}AbUF8-LHqC`l{^&R;Y2V zOrQ(90@gj&TUttW21DvFI_i8J=Z<*-xb&k`rQ$Ofp5pBly~m#J)iSeeXrK|?DDcoR zckz!%_SmUMBag^SNZ+KY zajbYgkZD!T;HJ*eU;qUMqx7+eb1L={Mc>f@(g#(bYyJ?sAADJ52a`{-+ey1}p6=tU zy1Ebq15SN}^WT?u9_cR>${@@!+I|yjCxYP?ns`Dw6X#|1Z8+*r`JP1HSJLP zV;_h3W7@Bvq|A(>zo5u1a3jmU`y%H6siK!8_7?dNBm-SvZF+|LF8imykf6Tiy=1GU z{VRXtr~auvx+=g4p<03t4Ztz`C zxow}1@9*7Huem#Px<`7s2!uHE4bwxK!zG&6Gj$xu0Tg}!cHx$K6HQ!MSj@s=B`f{w zEPaG~h7iXU6X+;^$lXL15;M7!HMNuIC>BZueR6ly`sY;>221N%SYY!6jbYgy1FPfT z?AjF=?TSSqWDciIs01UA<0ey?NgT&GcJ`0hIRW#{3K;Swb zUCb<^(!tqmZ2xt(X-qToeKuJvnav~?boQhLU!bWnJws~nI$lWrOf+5tT0~#^&SXaI z_Ssz+n@i{hK*BRDZbCxy0&P+a#I99ToB5ufhumPO@mGNf?m*sHBY2OuXuy8eIiH&0~4KgYKmodM$z@_ zpX7eOQ|J$U3kdCqpoJBA6z?mI-d(s8PZ+5LW!xJIhZz#ZcWvdiwyfm`iKL$;?BhFX z#mgR{nUgue_ncCaFp{nl@L}n?5t)zn_NQ-q5lP!!B?#5zxIrmP`E!3*0YPH*!2mo_ zG%9Q*`9nzU3-Pz1valKr}!q>Em)#WvnM<@1>F+Ks8C8sKR~r|cuHv>mwBCRG_wCVMCP@5cHMy5d+2^2< zdnL%@Pv9dCDRJLXPnDE-XuY~XT0GifO|OgII$5EuV$1rf#hR7`p+3v$0{OfJ`7hd2 zwqqIQy^`V2LE{W}B0Xlu3)5YJI22$oklzKa@S3{i|FF-+-{6X!8>#Lj(0GBeHum|) zl{^VYB$tP=SDb(wU%4m1lh7?xow}fdwrbusSuK_6=MkWjye`h&F(fsiL~n5EJ~DLY z{_3xVU)F5NrB^h)P>MLotKX#`mG?b(Kw-B$1=j;irYG!X4pH~|rzH-~#c&{P^EuEf zzCZ<4yKnqEa}7l*-w->>)ox9B)(ndcT{97JZ#RABZPa{FM$Ef#v3Z(PX%?T=XY`3==nSrBkz!gk3Cy^3D?fp>1VFA`=E{K z^8FvBoHPx&D>{*bA~4GQUtdZU2~yp*5sUci6qkB;TgaFLymvj953^h4lSvvZOw8LL zOBeF~5?t=b4=DB=(E-+wo6`Uu(oodBXRQSa76r>#tj)+2P;G4QaU!#hLa`Du6 zg^4-b{zIs^?YDi;CB2s7@U**xTg`=Idxk9ryDph1ZQPjC>N9A^T%)o(oOZ$^?==h# zm^;V1gzO5YwHZMMoq4Yz85PtuGkjWebs87l%v(=A1rn(3TP4nByqZXf@jvf$u)0vg zW$^bZOdL?8PvdwV#lBV}AK8>9gtg6PY@|Sy_Qq6z|E|Vn*c$#meIGOI_GgO-3}+Ww z3LUhH^E0%tKbLAu0+d6P=M-03_(?HUcI2|s3Hj1?jM?4G61Twn zJ_u{ItvgV_Ew`L?bYZ>ZYJ#`~8Nk2jr|x5VMPU7EBra;{UJ>Qn@w}V9s{@z7AZIIl z*@OjxO&8);_=|IO{~LoOv6Me0w-!W$62xm|TNFfWasq_kg2U-1r}s_s1pMm4kJ8++ zhW0VFT|ONTD)*&hfnM`HTm2>!4j|t*Db`&UIRlGCuYmpg(F?WtND1?W@3oC=W}Y;V zJa3hzO1;{8bE@TwNQQWhbi^EC%F*j@&O0Tp`(Fs8FgCsDR|~YzIY;Hy=dDhi zWoX7eDk~tnp&*N`X9!^gSx4yZTHj?r= z)YvKB_zimb4OO&U`AaufW(Ms?`SlNSBQVphgZ&b=lsOThq1gx}=(B09*^3gm9k8W`My*R$m)BPu$)-u9sAIZsqHX4^2rRHr}JSqFm zA&{2L8W!_=A?xkIu|}0u6uRP3rY4zn+#p4YBlUT>!T-1*2fj;6C0|fP-6IV2nb2k~ zI3qDDv#1%ej9c-MHFlIKItZ%`G55$ga|)p=2)Gms0n7DE`B8#XcEw>+e4gG5@h6k5 z5s5RBYN+7yO2-MiW4UVoLUpL;z-DneO-P{E9~=P%$KT&{oNuLZL8p@y_DzQ7YkU4~ zrob(~BLSfWcK+D=5FfGwU#x#Kh8YiIi(`pE0~z+&Q22#+A5CjK4QnUr@$S3~P$%Qn z*4O-qae>oe{LQ^P1Vnq_Cw+H-qDy4zi_6L+EvO7gWNpBnw94FvV;Rpt;r9EmB0+3M zf?DQEX1_nv&lcw$&@$;9vQ6fN@i}WQ&#l;HrQdkH)zx(#N|{eEeaR-zKSEl@?N$Fo zZ<#sRrHxVLvTcl|5#hGi{c(xtQaMx`7^Rv6#dw#AU!q<)`i`Mcy1*P0M>Z48(=HCm z#*Ek|x{)sak%yfr31&AAp&BbB8jm!o;Qwdk5BU|^EA*=o=F*fUdJY1kobvuL1K51BK$xbn-5ee*B`0YcL34E?Fj-nUxsu}F$ zut+~vCXfj??Z#fxmTvA>!0Jgj;m>3a(K4c3$?xg=9L`+vSexkaKRtFmHb@?JZ@z;o z>d>|+4L0c75JU|DMDFSPxDRP}C0)?38ivizkIu^m~a* 
zt4_f;y|%z|rIR*i#FcKCJ4o8JE<+thLyIAUCES|0D;EtGcz!M2v**?SBAB!Ciwk-z zz*g)h6ZNC~?6-L}HBJFaY!=KWVLQWPI?jdBOZsJ(zi!)De*2Z&s_q2aXP2*^?o;k0 z>BRIc9Ldo!5;C1=*l}I8r-25$!vgBgrI|Xmr`{BEc}h8>J#St7I|aj*^0dh*l}VXA zkV)R|cyuq0%#*WZ@lOVq^8c10e)anj-!-(X$Glpz($l6S;6>9KP)7mQ@Wz|6_{M8< z+8)K%pOB13R8#kdX+W2bx7V4Ad(4esXxqPiaMuCAz-hFwHPG|K%Fi8PGc3V*&fGt_ zwQw#qr9N~3fD^eCV9E0d83nPW3c(&WHs4Cs5(S^^^k*nAJ+Xb;IqUgQ#Nvc%nek z)pV95X}}uXO(qzU>x7~Gg|HggGX_FQ>QezcuzewC+jCG7J(XF+{f9Xe+G^SE?AmZP2)9~(G5hN^%}#bDA7 zGwi6vQLPovbu+1t!4R0?gv>&)4`(mjVi9NG-)mILMW-?0E{fwdz#-J6k*nOJ9FF*( zLo(ue89#mH2o#B8xyPGs+noNbOsTyQwy--xmC(|dRFP5eY@L2Jdz|nn_!dL^2xOWb z>249WVk^(XSUGrykOPfm;@W?|lWh_&68CCf(dK-lEqCMS@0+3h@(^`-haoEGN%NF* z-iC*n04wI}F6g%`7q$G$Zrv>3MvK71rW5t;J(Xk0_6xsG#Q0PF(Tb+0+*15kC>O-c8ZboVqB=ry$Tc*T3;tg#U^51Y%3mB=|_c=EC;ga%pWtj?428*e~wQj|@2h zdTn;t`OY;tKjNl?uirN*8pWBs4xaEHnxiZ^N$sTdhU%)*CMoa)&#i-xaO%#`L4Bh4 z4f8{T6)7g~ezTI@RHv-jo3{iFYvFfwe(0VHY<6D_WOZHj%a1f*=ikL;MllJkm@S1LI~G4CIXsj0pC5r3>u&9W&!BPFtD-!X?u@e^ zFLC$lH^aOwuHTbYte1{Ac^EYnQgWHucrd$T^%ydr=#ZZ_5LfY+61vT;&>w57V~z0f z2qMe*7gb4C!1=X}6D?tXkM~n$TJ(((j7v=4y((_^a8w>KT?oy$2g8Oo$4GY2czdk05m$nJl%n2;s0%6kdti+pj9K*raYdvaNpGX+&1hPl3 zJ~-OJ>=e7cSBh|{C)foHNERP1=sT~bl2TS-Dj?E&>UGk|^U(bV%9BZ=WBmYC>%7Du zGmLH`l7MfFd$K0;Fdp^OiKnb)!#QW`Wn=E|4}BTrBZOKS`FDwM!DZ_V@1%ulQxO=K zOWtW0_O%;88uU(f9YpIS9WZpjL*rd87dn1@wkT`D!kaJrI3ZeagM2uk)$`bJbd>$q zRZ|=--gJp7f8qRKPK+01a*?3LkiMOb8E^$laS(M9;C@FwA=*)AtD%IBEbgPLlSM1< z428+$y)zq*nzB1RtRyUjml3k6F5zyfKYP(sl9zWJ|1q-mr)u*J#ojehHh`HCoFPp& zn5$Hq@zMvX{}9ajCR!=89_NUbYSSXHBSVu{XNFK!r}k=&+2^rjwm@eOysxjzUTjS4 ziYQuaDK+k-GB>!xVOvx*J1IQJt1H}BkL{mPp?l-_fQfMf{X)L%k6Zd*B71oUQ~Y$2 zZa?%HL>+v2|J)Ny1}~5O-)n_pY)@sp+J}SW(t4=s*TiJoQ|Vts|8A^3a-Pj8c)tnX zPh}reixMOY8{1BDtYpXfUT+l8=CPf7$g$t5;U+5AruX06aBXhQBHET1X}mVzMy$Pf zDW{yxb(t&V*93f!LmF#pQ98Ez{b%Lqm0jgmcz6{f%4-6~qsWuqSQ<{!Z86e2xHpO6 za2rSmAHeg+vs7?JT{PP3h|ckoLTXLa@L-A`{L?9ziau}O6cU>8bf@(hnRi# zFq{Y^Y&TTFk1uiOE*gBtKiW7aX#T$Q=+Rj^r#SGqH&o5a2-Szr_mkyH_UiW9otG}7 z)tcr+(c(tX*&1|-_3z|dG1TO}7Qd~fSv*moK@IUr1ZE@pP}#dZGkqoUpCSlzU)t!f zh4RU8g_#3Si7d!Wtig2I{l$x`o7QT9Pa>w%)d43H`AE!!Q}Jj_U-x#uUz~3HQr4Cn zn^kOC3dCa}uc{N+1-t&ufj5_nqP0#O5P@3#PN(ck`M3f!)d{gHyfjUe0d2(^kr}0_ zrWM@v1OIB~f%3G4Iev~G$rnNvz93~*7AC{!tl9262g&bd5{Uu7z!xxPK)5a-Zzd8o z{o1S*PqO9Xp9hYq>V+}xE#!CNXUkSKr7spir&u-sclY2(llX8u$GWcDR3p1e&h7E> zDPZ5S41P1w26}RE-2>vJ>Pg*!qYEZzV0Avd)7=dLJMNzzGkt4E#cc2)8$snXw*Hy| z!29o2hLxX={q=eGIugYMhRX*ne%&$nsCd;6t=OqS^=uCh9PGoRljach^tlje(Q7zb z%|=0!5}|=Jc31oBjY7z=b>w!_Ni~VWZw4Np6N5m>sv6^_y5{O~^@a_}W7VK%YZ+e+ z?!s&yx-UYRo)!2YxiC}UHa8B7}MOwf@+a^W6B;1TcSJu{ll4M@A8R;i+nTW0y3V@AeDs_Zyz%vwX~SxLgql7m&ozFwyo=DwPgt_S{QL{AF8REuC16v#vM z_Wvn5>#(N3zm0!JjV=K<89gKfq@-hjbf+MobV(y=jTCUiMo5m)As`_opdj6dsI({p zR7xr3i~2n0_orN5*LLj_=bZO_->*Ba&-N)*`#n2i@dL(G3_9?;in}~4hFraXGsX98 z@V+|1HBGr;;fS?RxV9?rOO;v2e7xYDJ98_qLCie+Um)D1E@H+HD&;srC``)!HB)^z zr=(5mFh5S?OxF+eR3G2=1zfXglb*DS^+x6`$5(xJ_J)iVCrA=~t!TtHV;+B_%G^qZ z)KAP(4J&go@by7cszQwA!x;4OMz5Wer@aMpxf%eSH?&`Qns|duiJn)y+Oi!CZ!|z{ z<1DC7U02%)^airhDps`b{he38Qi@hG#fd6aCzmjY#9kufth@X0DWAsetA4Q12 zznOGnK6P6k#sK%~qCph55IqX^JF-fbT`hj%D))Y$P#dJ|VTb?V6B36BKS)1uain3( ziCF5{?*osxxom|$L3%o@uV&-^c)he)v@7Pq;MW78)=c|f$cjw%{$!+@J4gkLESAy_ z&G5&s<@5J*w^T6Jz^j2{i)zxCy<6XevOsElQ+g-`$*6h>YHa3NL4k4Z2g=OvJuk^W z!4dY1X2JeBBkQG^eEZ>eBz4zitU%%=hf{K(8igPE=XiltU1lShpC1@YsihU>wQvH; zyk(@jY=t&%5PJXQ8-}g>nLorjlS8>w;nslij!=*q7>R+>z4)7gVF9z=j3d;asjp2Vb0v=wYt{tDQ1zh;`x#RDj*mQu#5`G63uD9Z>?u+19+bvL9{|MT5sk}acEdz3u!WS z0ZF2nVkO)V?ffT^+FafDq7Z(^YqYZL0}Cp|L9lBx8GvQiYlkp`TO2ow>-IKa*KSr8}4lJ;Zx;Wtn@2MAg}! 
z^%oRW9L3TPkQ)PIfMM&%s8}_WiP!l#vqSGn4%rih$ zi>PDQL%VM)j#tCNmavlFeqG@eq)gW}I?Bnp{oA@d47h!$SLd&JuPi_5vW*Y$TtAHZ}hQhLm)BAbZj2-C4@S!QkoUQRP65+uJu7EBX0k zQ9r?d^_BWClw-Z6M0I2%O}L#Hlq*SMg@a8zVO4u&G;%vau4FvFX8Z}ucqDr;W(UL2 zyD!0hi$P#vx%(Xu@g(kPiSV=fz^)1rto;Tve~vtMT}W+esn6RExy$^Z6~pOapx2ulssQ4+Il9~UlMOps+3`kUy{aMtJ?zvM& zN6RzbuLN>zm%cwrI6t?bL~MSr9}cxCia2>D$bZphNh3(c95mBW7m=sjd!&6<2Uu0v z%sb<_Vkmy{Lm+XaCJG06#;|d`9oIsm04<5Z-D|1Uxk`aIsXT# z*?eGBnBp)An+QnT_|In&bwjZ8iCGJ(FY4cg_-RI{%^wB5Da$eN5~Ywb zM_W)yQoeZT%O{?xWj!UqUi_WMzWn}j>{sB&ypmk_OJg8+HvI)km?-oIH#8F($eANmXpIn-TcX!fSkyTjBJ~+8dqU3TnyYl79%X zmK747Je)v)S?aDEY}MoVnKtNs&FsO4)31A*-4nPRr~AD}7}jX|*fmA>bj*yoA4BGD z)@8O?e{j(|-3I~Iia-AM`{-d$>JHJFz$z@9-{f)VUb+Ts=R^I?=G-rzCe3@!)0Orx z9BWS9n%bz0f#zyMe%+%8Ik9zYs4jH9&yeO#Sku1F_v+4y!%cfkS{xp$ya%2;y)2<9 z<&YvaYn7LEFU4~Yvi`Gx$wc#z>A~7XulIDKc=Byt!!ma4k33AZB7~F}@&e})W9!>! zy_cY;71~J-=YZ;+DO?h7d^K3#DwDE0E2ksCuEb-gk`FvEUN?lc`u{Q+rAde}+a@n9 zXZQnWR|ky@ilOHft$b%|BuFj5!kfD5dML(zH6z^<{1TE_E=) z7p1nzxp6xYXc2v&mXRABokH)p`E6|Kx_MTz`&m;A@P8ENC7=bohNa;YEGl8?DIaxEn>5muPp5(ch z85aB7Qm}|D3YfDqG}ucQP_SaaZ@E14lnG!)d?<9!+R0qpT}oBF9xJtd8x4@~M2}Bh z1>}ev4i=jTPE8cnwP1$bz6srUEOa+ITx~;L{FzF>60d_~$*qz^Na7w>s79dPuJl+n zZ6>*nOw7NGMGCc!_3DMZ10|uqoklU?JRv6=Al9Mq9$yIOLC_2LKOtM$oQh~BLM)f! z2HETSHa?}=XYZ2Ql>rqb3*)RfU~Pc)weOO>ag5uD z%9&93)C$Uif}GjO|J-uV+`3bS=QoMQC3{4Kn7<)Utrj;BKMI)R27T}|Eh{~`5u?mF zhkg-w3n;C>;Dd0vUG(=e&gu&FN*~xnpn;F84(3K<&m5&*cQwCDNdzSdMzm#?;-r4( zV#iVrvBQ~Qb0lR0BqEc$?t;UrY*i9mgeXgelsKUD$#r%;nd13oB4)($9T0*Qat(e% zqW!mT`Qodm;xM|v28$W044da1*34mIpP+Ia4X8H&I|oy&!d@reQ2IQsFfT{K0A#l9 zE+1YDW11@ZE2sBv)3Y`4{tUI@+=qV&1Au}!W?%F-` z&NMWu48J0T2f)IE^&mNMv-Rly@8S*pq9kJ26l5|fWHBm(j;GXoBmRa(K@jnW1Z)&^ zMZE5I>V-e=iHGjk`~vaToFXAz+?Q;^CdUeq%;F6 zYsG5X);Zn2lP}7GI!!G@8PV)AP;|`6txPAd+7-F}+<%tBsL%sLr%c`|vL^+d_U&P~ zB!1*>8uH_p@n`d4yM%cIbDcQ2^&W;*jtYZf59t#@TP?BLXEqMm;AfB|>V8oM3Nr(~ z$$xF0>F-w@8Ea#|Wnnmox}BP@FC#B@@`?R7-#qA+Q>>6_T|@T5qDr~X$3wD+C+Iub zqU7L#AfO(kTSxv#@MpQ*Ape-Qq<0Kn!{Y<{qR5V2VV9ff<=46VX^kVOQ4YY@0*_>J zhgsD1Yo7g_%Inzk)D8g@N8Vqmzi(k-Q#!%SQLe4HS?W9Cg7%*%gOM8L*FvvLy^$#< zu`@v?Gc7dARK;bbohwphLU8(E0RQCdlf1&(X`#txOuzl>%BW-~I8X+0$wiEs-IU81 z;qiMmD3mp!I}Nu2uQ+1xP2J)5haY{FPb=Fmi`?)!e_Q~y8v&vtQW^EuuRoH!PRxc$ zeLD8KGR+)f(JJj})$<&@4<2a%RE|gUs@V&nDXUNSkV+^E%99%tH_NgT0(Pl&+(RF| zsK^Lj77rXL<$y65cpNUAM5FK~cMZ|JcaF(F2|ln{YWGXhnpdZLf6f?n`7NgiH@6nC zfubP(mGRAKIuUiTr_BHE)X7Z7cl?;8&Lb~ZEvv?jSC2ii%=kaZS;40RvvYxqUKT9D zBN5Iu3uq`*>6=-L@J;liLipx32|Sj8^k+|ACq~p%xw1JxXBnfxz!zyT%yY}<$>q@d zt&5Lu`d2oE9mU3hzf(Vv?)EtQ;T&A?K0BY^rsE$yq!HDK>KcDdIrr?=peODPGA@3Y z8N95i+Yf(a@Y@d4%tlwSa{g%*0~*)AY^Yk2W)~HYW2UXiPcSlIL=-(e1;gck_JcKg zzbGl+&A;=2d1OcykI;i0GUV`%VM0d)oQm^3UIi@LQ88<2kS(aHSUyop7*GXnl}bK! z86l5>f@^&?g+94IkGl>G*ap8duy#&C=yyigeH&3T)9bqmY2yx$W{jY;oU$vTo0xqS z+icG+9DMlyAW*Xg_q@`1(ZV5Tz_pGTm|&#Kki7lt@hr^YwolgHg&Na3ZsZgs zI#Dyges8<-iiH16TD?tJ`Z4TsKAV@%%oOsw`UB0I1q%VMgI|Z-&SYtW6c8RVF9K<0 zlQPe}D=!%k?#bUcLvdLdY$Hi)o_t z?c7>lwG^sN4dy7#+G4=6WV)`6LBaJEk4Mr!;I0@3K{G?kUrjQfl&cDpysaGObPfqU z^iT)K)}04inrqh!hm}UZKDzmw$~FC?!25J9xDr@fb~re#rxRGcDe8hN&Yhjnd0 z!;#Dhi8iRRs}-L=V0x#M?&N)BO;o-MEOQS@xFSPW1VIU-q z3DOBRz9sEq;}M|IV_T7_Bb~*cgTto-Fzn&`0h!=BeW$zE80#ppEJ~yNktDi+Mfx-5 zfrvNmkNgicRkBHaq!WKkZ$Fg_AMteIOQ0{Pcugg#dZC=dln0! 
zJ{t@{l08^$k-fVkH%imx3~I{?b-brL*Ca$4sI(g)D|NV`v6odh_8qz zIEJ|tF>7RR+3(YZ!#sd{m~?e1N{S*YhmrzI_RtFjhd%ZgiFu7Ru9?jZSVMiEq5K$7w7$qC4l?5of{la3ZT@NR(Aquqy< z&)1495#pQnCu$VRBsxK8_y+!CO-<;yb`d8}sLKCAiI5VsA&yyE>_cqK6_x(^=uT+( zrNsf0Hh(%Hthp{!i9YG^nuvkr(|nFhfK zp1!e{(Te0;>ylrI4u!$_3XPE)&p}#NC~vpjOPwQKtQn8;Urv3jI~|kmLoxj168cag z-^~NAQDl3iCf7LsLJ+edbQg4ZTdJB@gX-_A%m#dGw~b0n26N}Gry6=_itVdf&?FW} z&s9m#cLzVBpR%#b+00;mK7S78zhT*z;BV1{HLG5`m7o+o5?|>C78|ZAA;6xa~j;D<=HZoLtT*I^X8!9Nn6&IF)ascRlD+^=%n3%8`N5lE zgnL${hh-9pFOLB&GB~P2DH!I8eO&HVl}0B#I`G_fCl(Ne^!upI2HpvwV@;tm9|M6% zcb;HTioP3STdE%JiIoNwUg;n$hwM`(QHx)!sNsm%s&LHOM9kuHl|iF9jqcGa8G~yHJJyatM*tW7p_D=R- zY6KvDfOUO!X~Vn=VtU);uefPBx}WR0VbLO^SqbrHP9P&j3T$K$JJ$_GUuxxyopt_W z!*B%^$Cd92bGQ)u3exXN86Dq1Y;=+6>V>JKbys$ysUEi3u}%>>CE0}Hm#`v9&dEdW zu%}rTW2)`iSdMVWf~Mf%KVkT$mr3ITzq)ckrU40VG+^{y#wc{ldBD2D`mZ_Q@J<5s zwD2vV*Od^j13oYIwMKE8U}9F(Y8z+FHV5?n=>$|#=)3&lc%(rCN%avwJ`jwm+o4%66##~5zE$ZxaAd z|3thL!7x@``cS#Eh#IeG9X!{jTRFOj9`bu^Fecmu$Nj47o0Q76Pd2=PK>v7<1_e5- za+BtwJ(+ho)zj3uw?~mKLtvjDpwxa|_|Om`+)C{AyZFMc3$L|6w{i>1Pq-iFhCA~e z`9J7Z;T?JX&_w%66j2)WC=GHtV&wXDhdn#HQSPs7S(%-g>i-?>UgUoDcK7i zAv2rp5128U#b(X&(&qF#HDH6t=$4(7LrClX&2quf_+3n?8>K@#*0EV(kt%j6UK zNB^CuCv@*W7P@~YqwnXRA;`Kz*LYK=mffZNf%tAJ=Spxr0tPbB+9MaO1paZYzH9VT z*RnI+|E>`7sojU-dG4Y9m42x$E9kb?%6#sYA&s{8)&l7#9$bx*fT7(&K5wc3HO^YB z55ZhCZ6BMc{v%owjNMk4r(2i1q-cU!EI*xDU->u?xE&3U!RI9f2Z<;;ZvUL;h*h!+9Zp?C=;X07CXXFj0`)3<- zznk^qCYAIlI0tHQN!RJc*s#&)DL-2CJ^;~H7}fN!@q-cN;HM+ z^02lR;_Ihh=NnL^SY69l0xj^2xrka=w6e;o;A71sg%a~++R}|cp*&S+S$B~3VC)h8 z;5t^HZlUzF#uA8n&PuG*{cvDb;B`pDII)7P-aIs@J$L1mGApXRhJ?3WdY`gKty&9ke=&;*gQIM_{;Vmow;wAiZ6s@3H4XD>!&bpg= z(t>I8GL|2&#eC)eyMs#{&)NlDcVmU;rpVLGzO=D#=_h`h^c86rq{yb5V7SIQ#r(Tr zzBX-vyr(oR=9ztMIzq{PvD?L69$Mruj;R&gm*~m(*K4{ZE;l0BED$AV9oI{=)ZXiihCj~)Bdh+U&kIDyAC-E2 zBS_wrZQz*oP{W=<8|W|-w@&fy*0$B=xeObn1wb8X@K(&sSKMtbD4) z6>7Dj@N0s#z7~HbN#q#F2iDOXKqI0J%5irP3XI9coGk#IT&U_;dBGDS!#`Al3v|(!b-M8F} zGQu#l8Ugq{!E4)mY1iI7L%C3YmC3>|uO6`{G%HaSeE~5^hP&Iqw6D-B)>N0~4j^XoNxAmBx1nn@SURz5L)?svo~{m+#Tf5WWL2-s*T zlxmMDvG;yrsJcUvr?t>`hneiLu_#=3YX{#|c^KE;Uj_<&IFFdkA6!V*N97@i-F10C zt1H+niqFU(Y3wlym>e9RKpL_kv$i^yH?#P5=+tAvL68OhR3>=AwDX#bW~Wyg!7^O| z;5(m^({y;baV#;p2!bVlUq6xx2k7g?G!(3pHm8WHfyA|Q#D-=vi6;JKXy4`tDxLJc zo-Z3R2brr37pAQE^v6|Vr+;PUm=D(m$N5mo2ql5W8Hfr1z~HBf4yg_z-l1>o?Bs#;%FfWuXAGxl4* zQY{ZvOCopVy_i`F1NXyz2=znSk4x5I944MO4GwL6Zqg(dZy(~*0+zb4&0fJ0nGPD z`e(FKA^hpOiA-0N8fY+yo|F!L+{I&F38~&`r88)O)_BjN_>4!g%dqlC13@FYP0XxqfA2><|(L}yqx$Tp6!4GfNUM*^9b4UIYG=;>{ z3b$QfK(L{kt)_3Qq9t%$pbx{eiHd%zGh*7$ApSzze3BYS_!rP<`e4SA_|w&O#r2x1 zbL|iK0jrY1EVv8p@(>``JY%gJ#7+X@_!Gg z9Xi^5Hc~7j8owOOm}_C!zB4m5v9*89{4A>Np1)>AAx3Wn?xrbEl=;*mRKr2&G34 zLMVy?#Di~^X9oE4n8Oj2y^zl4rna1%UNmLLT-6=cju1WVT~BImFa(AaiZZ1Qg#LSn z{IF)Ib49j#Qd%Vh!}Ly=K_<3blM`{_w>w~^bmG9GW$A}|5mP> zO;xBFFf^q?;hi#L@;AxJQ!)%W}k zKXY4?igQ#WU+FpI7@7Yj;BE+22S?0UPk`q#EF#_wKBk?m&SHu@#3 zJ5h#X`rWrrt#Z#x`cL&{1ROLpU+#1}BV5fM)_aR{u^r4+NC^z-M`h?>5bsvxvv{>j z(c7Fdg}bM9jg({jXAg-lU=}Zaij!Rw&BTYEK9%F7HRo<6f>h}&o?glIrQXaftgZcu z@OWOKXo5FyHq76#W68m?1=+%~WA~091)T2GOij5vfZt-NK6B;YGw#4;n ztdMP|I~WGPr(^bEaXU&*WSnlw$!acn#NXxo5){zmDikmhLW#@xK%yq+M@s*%j^#xw zjNjHNuC)2!dcc-J{iiS>IhnU+XRoIv1kw-`JgJAFQ4Sc-T)YHM>60;q2$A_ur*%I7 z@-=)~ayMDt2UN9t1zp}k=79v`fkz097m#CVN~Md9z&hg(Uk@kU-i2nKP*CUDe--0` z5eWiQ7Aa1U<}vUOTd|}UIl zb6t3Ns>@Sj0kk<9V0-Y0jj4$H|G{in3PWViwBGo;nRi~svQa#7N1jDZ*j)8;);o$D zV8C4btY_@j(tZdz#eN$(tZmROWdavzh=47^P?8fC?CnL42XgWrID>G_X~1iwBNNm;7dlaj8vO!?ou+!*PRdeLY2XZBUB|Nvu1*ZyKvr`9Mt68ocYA%fV+_ctbvi4auWa{qlFawEQv z1l6Qc6y_LjMxov{KoJ%rceFz8RBv_>>?7Dh7$?7Z 
za`*2>(VFdv?i=w&#<&-E;S4LmB$fXanXovmlta_%>9FDM@07Tt4^~U$H?nLw3eujCEX$O=k z)qz(uBKeCD_u|h_IBSxGy}&lS7BDfKME}0^h_I#dKzWskY*FKl)^kMKpeqWF#Usn{ zh7!1Vr0NedISdu{z(Vz?#LtaRP+LpH?=L_u$b@hwNOZ?EMsw1N+)>!q8V-s7Jxe*j zzQ`Xwa_d-DLhF=q?2|f79x9Q^4BYUxFuf^q*UzY~=v1u$itY2XlZpSBVH9`Wgo5^D zNfdT5<>Eb$4Yj#W`cF~r?V>QXqG3MdN(^lNp4H?st*vt>5^PN68Qp~#DFPgqVR-;@)NP z(l!=p3(%ByqOD#;%p~-Gk%7n}uMv%vhLij=>D+us6_-_FS>FIA$9m ziN{d!$BwNZmzhX2#E(t`6W1O7qz5DE$|IEy-HA{B?c!WGry#j7f-x$NR=w6sIMyj_ z)*bZhWX@`5a9?4}QK#0kiQ6?03IdzGZTwSTnd}uszDkJz6y6|M>udD~5KU5*H!6hs zc0LfUd*L!w+<%g2Ml{B7{tBEHp{q>J2`MWmPke_apoCzBy1ddC^TRrWgXKodayau1 zR%lmS%oIS_SH7X+Z&8j=lu*b{D)GSY^DikBP#_B>J1_FOPShf5-I7J4o}v>OS>~_^ z_p9KGNzsV?x}cfE5j8UbdXA}U{wcdjDV_jk*{ zl_+OhnyC|vTlv$61Ef3oYreWepDBgiaQpC2m0GW^rE-xynEC*Th~X*2B{94TvwF;X z1+;74{zfL4FA)syFW8iR`;|>>L#0|Ul36=ZC*V^FeG;A&*4>ksiGA8yDaNL+?O#iE zjD|9*1eIsrm7!GZ0BEr>A2 zx~;CrW?>p4G;?{#(@oj|Jf2Y8m2lhX74}fs7%Mq_QQAR%guuvipuUpho0=H&$;7Rq zmKNfpdu(qK9vXnT!ffLFxBhabge=mYaWF@;|6Y#&1`iNky*#>bjxA$3XR+6Uq7oJA zGAZL@r@_>?>&4kGy^(R(ch@Y#xHAumdqD%LYo{mE*ww!W4cBzkNV(H`uv73ITh3On z#GdVIjOF)i})TZP;dKQpcPU5r*Zn$egxR4I*70ps@=3Lgdj8lnUt4bNn?Xc_pxg`ju z^cE|OL)ogJ``31i)~6kJzj{*{Q10{PKU<}=FB|OrGIs(bs@zkOiI;aUhi6wcv~O9ytxQEsxB%R&H^mE+ za~?k+@l|>oU(k}%z2s?)+g`&;`pznQk0B{Lw20!XgWAT|GAN_a-b+}xrq9b<4mXyp z)@m~VOW=CjKI_~EZP#oxKr#!%v!jjnl=G)kXtJS%-()2J!4PE7T+xpXb`+O=a6bRz zmW0c7H~P+%Uhs=3bhNhDxn-B@MIm?|h<_F1Eth=qBdhB3{{-2ji3x89S|zx~d!`MR z7lMK`K7BNL|FU6_udT8AfbzD$(^TU86{lwVKUE9*JS-g0r>pmy_qRmnsZTUs4EX&R zJnSi#8t^g0kmv>pbn_$$RNyF&>ZPXi`bLVPq$N<_V_@(`W;ptq*0}zKY)!TJEpRbP zd}T^46a_TJM*O0}Gix(cId(lkVNk3cyUt3a#-CI}wj3#kj5S-zo!MC`mM+P81zI(V z=(>ZD7jX%KrAW#7S*oYGUv!VkCmI+A5-YULl$$ee$&=K~3&|?~`z)IEq zaKT(3s<-v{EL8Gx?@tUshv?2uZf%{oPQW9LF^u?Dc7RG|o;44ft97VP zn%DcI5@!ep&7~cWC(>5?#No&)CsTh7uV4h@$q?i~u>bNR1)0Pc&va5(@N6cc-c=8H z5>`uk(_be-)=6Hh$$P7%A@2=`F%6;~x%ad)SOv^JC{F|SVjNqN0Y?4ja+@Bl0|o2y zOvnuQ@x$!VKPHl7K&XFx1I|fz1MiA2LVq7nB5R0J7&i;PtX(~758L}@M$}UjcT;xj z)GUUKBZ#_-rg_@kN$O$7F%!aFc#hVb6%2qnU;10g9uZLQ4uJtSPtVTiaKxrL$UND5 zdd03$#~fMxC8$17(1X)5>zSMRP*k?=XUdNi13OfPTqGvd40cLleCC<7wwFKtOjV7XdvlJcE_V5B#`a zShG`6+sZ;fon`76qbC!-4mKape5fCtY_Fb7E540g1-&nJ`BQCQIM7UWe6u*)N}3Qf zx7l2f>!x{E06qAY zYN+?ksHIWEc8Zo`a#WfUn5KBrPu30HGyGJM0 znzCijo=-nUgN+=^UDGcZWAW$X55jhAeth3Me-nLP4%~HF^pC}Xd3HWXr%NmmATD{c13aZkFo<%h@ceEtp zM9B?)We>v`zEYE_!}4;iv&?iFvCet}AsP2b97@TLN5?YHkz(GxKV7|&5-Qgh2onE| zh-XDu%;TyPUK!kKt zdWPa$Ust5m+`zD!yt(Gyfl#9Cs>xqW3Ry;lfOVx$JP;yc`ZdSP{I9M~JA%^ar<8Z@ z_s|<+BuK=Nca@O*dVEpF?W1p|BTAHrnY9fLm?y1IA?ZZzSFL1@4H4t|5zGvDU5`9m z6gh!_c)kCu5F(a?rw@-;tFU}JjoG_i9l?r~H8+Bc!>tCHZI0fh-knQ9U4}Wi@E$`W z+BT>|Iik1@f;-~*1aS*{IrHI96$plyF=`AGSfBw_*X?ruyOkN(A*0ffRWxoJQ8hZ= zEIp0LFe}3%mQ#`;TCp9or?ChtTGU+<-TT4J`o!R_liE^Ov}fw)ka*y$cpXWF4V6|{ z53kz@PCiSGa0xIRg;BV#%B{UHB4FLkmA1Be;E z93zN6Uwb#|8Q4mdZ-~H^_do#U zUR|k3gc%JSqmliTRiC^cc~;023qWan@{@r}=+78>nA+W{;fBpJX{rOjWv6}{Ifib0 zm+`MnmV(W(ahFy9Q7s8BC~a{VNH_oUSKVs;tTVenKmrPQXABWbtIuRyp3O6#pqj4c zcCZF6nHim2IVLeNDV;?sDje4E`Y=EKSPewXvJW!w#ZjJ68IYM4$_t{<8AE=_9vo9) zHC(FrxpHro!uu|}6wMc9jl~+O>GZj}XL|Z8UcgEmPn(QZ9Xg^i7wrA;`~R z9DT!qFNT*p{m!+9o(_Vdgdy1tgzn?V@D9_vWS0*)?+d!oB;JdakizFbJC43d^i}Qw zYx!T&Zn3Cv>rb=t^FJ_!EPYQ?q@|goub~ySDh!;p^pGgt-Wn)8Q*OAq3O4G7sg*3} zduS5v7un9@isAIXzuc=IJv?W~A7Y1l^I`X*R+9hryalXALdHpAwpMhf2ld0e@&?Ui zD~I-tViKUR%4Uw%b4ILtT#`)Mz8Bn|7dAQq!sHU^LIeeBosgmvzc!6aYaEu{-j$F} z1r!2E`#e~vfg z_N~tw(cVQ0@c{pk6rv??{|P~Ss$lT+^FnMZ0qE3dD?L|smX~Lg>dlt41EYz|WlZa2qV;bpaqpz(@!icjX$()5kyU^*PQkmzZN-^@z?>!;LEaKm80gAfKT zb0DMNcta8;5w2Xj*Iv%0dls?+prZl=#D*Q!;}Nqij#5t_XS0VPoe#mhRQ5W3bc;JP 
z$-7Fa%-#~A-`rHE@yH0uawXjJJxUUBKl->IuKS>l{JBkLGIivp6iMk1jl~SjKugR@p z1+sIo15He1F1Lp#rl%2URvc#vMA9}T=+Eg;-PYp^k4Bx;{fE8VRip=wP2fCCf%rB$l5We}TWH5(Vd+t2@f?!#b#F#4R%CwNKF1@?)&rMbY|aa~97?;PD< zz%wq9D}n=vVPo|WSN3;O=WW?LBq#|xHGb@(p5_3DqW z#OfwG%UJn-XBa^#lxC;F5{1qTCZCdYrAjY2jnRfB_epH<0J`}{4Qrp%<|Zo5i3}2_ zmfBEwhZST$<_5PWQbVRW8RPAC)D{2OrZw8Ew#H1#r#cuH0AfFiF2oxLi*J2y!YlUO zB;*B)fUN}WMl9ghBnSq{Q$B`dPoCvc3%%&|SQ$j^sRNHew1)fSH-uEVK*}g8%sgTA zNKY|`rLPZQ@Ws&Adpw>BJJU*;>n!DmPHhdkol*P*sfm3j*}liYo(Rw&Y;$3`rdZEF z7)Xtj<#|)SxdN3>YEF=)o@s7)hbA4i3=1FF56FKJKHeWYW!)7)p`PmKO#;^T4>RZF zwE1HJvtTCUy5?mRBWXSEVJ|S01}o++AHj0BJKDq^hjK|`N5GcBY9Nr~!5WS4Of2h4 zC>)R;ZY*d_e|Ge7W4@PaJmh=%hz7ahsjoFcgB41g(B(Zbls;$v`R?`a`7KRxgIrky zf82)pqc>P_$MMs{dOw~U4_6b&3Nc?wutcv9D>#-3u>LQIxzs`EB8q$SE{#SU^dm{D zqa-R?6t&*USICkmYpO1}Z5YVvB8JNbX<&%3b~%0ZdcC8!2+-a+mKzx+89YKs>3Jk3 z?r^a{z9yuyMjJ<=8xFEJARi++U^x*LGn@6Q7c}pO``t*EU;gK{LCHwN1j+g&v?Sqw ztBIGRScbFwcD+r<2U(-1AVXAaRxVnRxwF5Pg9Deh!4u{B0gE7s(3B}FLl%e@>gi#i(BODQ53mUGfZmj++|wtxvP6P$E&i^eM0lb+Go~?gRraCgK+!P$%fJ4Y1FFB9T;q;i|IHGiUNU4R6Oy z65Ju!M=;}JG_dV}by>D%#-tKG@ciip$#)t1vu9x(u4Q0s*5O8@Nq_J9XllJ)j0fmj zrY8@-t8A4`LWr6lR+*JuEuu!ng2$wOSQFs?XAJUTH_baf$u13C`f>x!Wq zm5_e$4K3>(l7^u9+zakc_M1*+T7oJ>`)xH;$u~=Vw%|dwEO1)P>el1pgr&8t;}_Im7C+2QK&|q?R{s2W zO+2D+Lw;CSu8u6ew=yURX?AIn}DWSd;X7$}bfdRB$qKI=Jjqo9bzkQ`Or2FJ|+Yz zEg>a?w>8o3W>I5RbyQYdCa+`-fX-0cJtqE1T4{jhTZvzxFH@UM=EH+FFgnwFSVY;k zQ!-rGfo?p=E4c~fx*^J_8DbY6sU-Z_HOj>7fI*+EfoGo;yuEAL35yWOTWY~*Fz38vLE>}Bc*_k1C@POFD9I)q;p(D+wW!LGH(#2>k<8C|; z^nrp6ulB9T6c~OKV_s_*riRpGlUSepS}5tW!SK>&6|q8lmB@p3fvX~jY5gSh7nh;T>WJxGi4ZlXrK)_{o<*CrM0V9;NxJ+L?740 zTUt(_q?K8+?I8wUE#F9&8Be^Y_|t+SZ;>Yx*TT3x*RX3z>-0I&TV!RH%1Zo6<~Xf$ zFUwK2?}F4Ud+>KXbAU2QcrisKJTg%fd)+cqhb2#DsQq;7+; zI~}MIjFK0O`d*zL42M`R#W~wkcHNK!R}FLj8|r`?el%pa1XD$2e!4gRmkGK?)OWF$ z1$Dae^YI{!>wiP@-iihFnbXR*_~=!6e^$1&O2Tc4y5XVwO<<+VJz2atzDGmh9(f;k zWEcMAwgx(?xr^C`STsu`6S3go^J8=+ebBot!1t-lR3IWg$+{mkOYNAqB(`ZVd^)V( zbW%6&o)x7-jN}wT0>EY;+`Ly89T2YZ=aT(^G4qc|3h4>VrHfada9b#$2qAm6?C-sa zf@hw$LRDv}Q+i&v^@Mse4-))TjWKhbaJJzJEbz0jm#E*o_9i0_zgEBU=A)bH6R$Ia zC4k89EE(q$e>?(9*S(qg&4FDJ4$qkh6{U!b|9^_kJD%$QkK&(uFRs1m-fZ{EE-T%e z(M7mM_NKmOMmE{iHAC06XW|N3CEFDhS=W|SvXU8vqP`{7_xJw&-_^st*ZcE&zs@<& zb2I;XQ*LkYv(*Q)4u&4?D86S(A>Ytcc53$BNS|S;_&zPU*pK8e=S$pcBWMwd4BThw zYFrK>sE9pcPfk|1|A8?@x7c}%;a4P;p}8}Pl;^runn<3FbbkEE)PkeMU?CYY31XU1 zOzhaujf@Qe6@^!f60*B+y&^KOH{EL)>%J^xB`el%xJQg_*6Jp0rWvJp=j9J|YUiu^2DUa|RfkI-ky#T~x zcl2sJU!r$f$Vqi42?YGUV=-BCrdO(~PQksn>c)q-r!;pNSSLN* zmOEEoJz|RrXz=OQ!Ctxx?wNj%>-K?hsDIN;uX8T}3G(tO_)|}PJIf^~Yar%)#@Mt4 zpX4qj-l$cN{=ln{bM8vjG$TIr(LsZ%z<`DiNB6@urhyu^e@eIj)` zDH?auT#A7{3C?bDTOTJS0kcqL%PT{ot1e#gUdyx4x_-bXA-GHJ+Vsh`l^geYSx^_= zka$1XtS*xG476fjYe=6H%s}B|qv2wp^rF#L)Xts3tz+HVHO-Fj>6lIuFI`UuV1;SN z`}GN)5yBB`)!H(nOWlc}>#{Wr)g^~2ElZ(3A*&9b@pqkOC}?5GTEP8TywfIj^R?3l zY>MukgBGs5X@(FJl>xKath%G3?}JVqS#2oUru8hO9vIj}J!Dw0ut`SacTlOINDSBs z8$aAsC;!IX|qoJgixzbrL*zJ}93GCA$Z|9?`<7PlQ-eje4()U2g{DaZ}J zsF0BYi<)W#(u>l*f2AloQu)ekCm)ZpMGb>%2}EdCva@p^H8A{MC+K=eqA^BGgDHBL z*1UxX`c~`4oD*=UL=6Rp;VCCPE_h6Gg-dU~(%Yd(o+)5uC4!37{e-)3SlkBdv za+nzcW&=TP=c-|JancPX-S2fpsbn9fRr^3LKPX^G^v#rf8xpfUXfG=?`dKUoMrHjE z@>o!v6zO@_HGf{_KS=UXPM|1{gsdd{-rt|!sjMvnSsyCInX>k+KgQf_4-`yZ)xjYm z(Lw*{5m?mpJ(bycId!xKv54!{!xMkoAq)*Pv)tSfs1acK?d&G zm5;Am=y%#m*B*bb9p_maPg0}d(IBb7aAk=1 z9kPA{R~yJTkwf4@RDjdLVO>$!$A)}e`pCxqoJ7&Es5$5|arBUZ6-xBs`a>=q<1;8a z=)lO65G5MJ2hn>Ue-$_))|On%QlhLdPF=L(TJ?c0y3yK-m!`clCu{UiG1y%QU*}#t z2}v(V)9rnR#|^D+w*Ab*Nfc9*P+O((!@v*`BwP0G16lc9f-`Q2gZJ4-0jaEAjvQ!v zm%x)EA6|!=C#@&Ie)}yp|0%mR?VN(M0{(KFd;CsPw2z}Mf8=t9UTA%cKuyAQeq=s> 
z1YOcTa!dQiT|0~{d`<*cqc367o}D{(mCWwZ4(UgX{M&*OsYGp6{NVrjPl^>)=0g8s z%rp!kyCi|Oh&>#{RyPJN8>PGs%C-@mqr#HD)a215%r(6?_U)}lY;q(wOw%8-Xo7E5 z)SCL&wo5;9#kYRm_6rzKoQO9FX8K$fdJ?qg7Jy*jc`I(4`XHjy(b15tQ1Q1R zu+jut7ZlbWkILHY=Av9--~PzJ=$tdf$dIWAvXrUzo9_Hs|3`(JR(PeW#B^;Cw@rj^ zn#2`rkv{#jfSY6QmNMCeycdy8t??rnUDe_QrN^e5Dntz<(WF@wc{dv+_Gpdn|NqSH zD~fwnuu0_Evu&;e)f9pjfICETby(=FV?4^Mv6IHhdj_1ZW&w#bvDDZ;VS*;8X#%Yr==}=RJ3nDRh=%pEBwwVe-nDh5E9801{21pou&YpTx zY8&^oZ{(0O3|wW!>C9|HG1-Ge3)KpL_)|_iD8I8nT-|8xhTTeznc=8UgzCJcNdK?+ zpzGB)o+k%>KdWFBlu-z^Bq+Tgon*dddxdD-dEpT=OE?}BC=~>Y2CM{giI>_~1xCu% zB~Rk0EK&>$kN34*GRJM$Xmk`WP8vO;2J<^>FTf81?xlwcHYYaq&t&FJH96` zPY&4*{S7MvKp5Ob*7$R~2&81GdhRs0P&RnTeC{t(9SWJaB+pRFXfY7~7HBnxbt=aE zZpBRwuVxGEU;KV%f6J1W+s{Lm?mAy zsR0nLLcyIZBg!vSVnTgC*xaf+Du1An4%y>V(V_Uhx`>UmUU@*h^34!h7Rc8<9HxoL z#pb&lxsf=74Hl>2O9hD+$&IRkYm^>aV9ubmr#Q^+N?$|FlT;sL9A-Wgl`;v#=F6)@ zI{hDYyBY*6)xwwP5hW)wntS+h^%lwLTYrpj>sdG|a$dYZV|uQ?4q$Xrn_sYMTToO9 zyi21}%^hS}BoaiZBVO{SKJ+(IfDOA5c$T;Wmv0&&8)H~o`Rharuj9r7g;ZaGt82m8 zD9CR9Jg2PqJ-5@OF9t-hd`RwMQc9Mx<)78)`nS4{m+b`mADgm5uc!Cyw!PZ26OCa= zk(~?NM7L|q)4V)F7_D0Pqtd$TL2{M;UPm3}=1yfRY6Tt6o}+F4UB7{}>)P*O6rW&g z-mX`drW$}cP7Lu_0IyjpiD$^B82_s^c=mqU-8c`oD~>mOU>gFvqm2>Gi#q#*-XlIO z>$MWM#RCDVxGyxTUp(K2qC{4maEi|{vHGaao=X8CEYpbnb;`R3{@K$&=h6cam#{@c z9+w+0-%{EcEnvUd%+#>#n#{@0Sdq0EiK+7>v{i{d;R&#i?AVc)D-R; zm3^?QWKVap?xf82kLF!_wYL*X1Dfg{i>$zG8`i&h2}{#R?ZuSZ*zaOv`2rQ^LSsQk zpn67lC2F*%DgpUJ-`xMc{8^46B%H)-S%9In$czYo$Jb=!IAtLOmp;vt#MAkGE3!d#Hn`ZtRSvKuDs}rKC$@yY3EH0=%~3P7^=!$b-nNNt&uum zILJ4y2xrz&S*?i+S8u#mUxj_EH1QO zdOm^m$?Yq1VE4j_N$w!oSzR)vwsbT0Dvqc@RUFMtnzCC=#gZu(4RJygh|1c^WFWj= zH+1XKWvq+7!1T}x>j;5osJ59tT8TqfxwZYS5r-2&O-07&z^HC(8PcrydriQ#y5FRM z(>MJAweyr|Kp{iYB>r3Nra`_g1!MjJ7#3drradek7Fu{7lR=W}>*D$~%@|N-nqEP> zQz{xIpt*Os6+Q3Z3F4Sy3bHbxn!`UHR0*vrHL_+2HG&HXs+INB4+T!M*xB1*bp8#X z4LPK{ooq=KC-JjK*0&A_8#pGChC4-9N%Fl96ph{@M*LklUpKxS`cf%_wg4YWxSC>Wpc6|CF}8m?AO6wMAg#{aiw9>gF0(@|~_Qz+gFPf!|zmZNWLvlq;XvQo45|iTEY4#`z0*vn7~AMwC-pRHC_6wHO;0n{z1~s`QWDCG zTl~I1sQz79R3M$6&glcU&oioSqk0D}hV96;Kl@U3o)o$J3;4masn1x)p__(UT6L!- z<%2d86Zm}9K$+m8?%mWw8}GT-(UpcBr|nN~u><>|qtt^0yu;_~2;?-S&7{px(f}Wl zO95OV?i6SFqm7|e0r)T0>T3$2l&Z2KAa#g46tVf9P~syQ?cB`AMr1(?Jgnb=*$p|j zsga_b%)I6cxgtIp1qZBkNfJ%?5*{LBAnL(YBhMrO{Bc$Ad#8@!DX?9aD123Oqz03^ zU$l1lQ-NGq`G%Q=d(#VLwhg{=W2l0acl2=?3lfPwiz`#E)oQ)Xy0maV8!T0#BMmR) zDkKSmp2KbHMOymW7*Yaamw`pHpvGB~7kaPU=3hx1aa($i`*#o`c<8%agt^2V8nL@G zmuO`kde;Ij*&EM-50Zqqc1DM?-{B4F%91-qIc(4hh(KLgM!k-NOp=1!OgqWZFVsTE z`IWy>nN-&UJY58_&8y`+rr=}9&I%#lRD-6OIna4fltw{WMmwr*5)jX@_VPS*&2Z5N z+2`aXn?ynZtGKoM$XOskS?fuO2{AQ^qGVTIRQJ}s8^Xl3_c29h-<-DlxPyz@^RvPEse5@ya!`zu=sM2WKWaCB&j z!09iMk7t~f6V^v^`kvWaMIoB0Y(ZlALU{~O%-Wlhm65>8Q4DVCIRF56f6buRKV=KO zX0UDNoRMy@61|vNfco#$XDhVBKKPt-uE5`(N$WA4!?NKc_ zfr?+^w!z}*kukT`%v9MbvF9YK)I3@(&^$wu&cSQusjvqqByH7wujef9>ASjC0b*`U zHje9Lw-czqUE8-4IrFFl(xgczXnD|e#}cdf_RHEXW-4T-T=g&Y(6m4#d3iDBv3KgS zOqfchJ)qeO*&1xv?)kuY^8a4c@Dd9@_chkCIqp5p=7$Gpjy>Ct zoU>6(#A!IDlS2rc`OkFgA?ePaB_8AgMx6^zki>OhI8N3KnLb%^X-Jn3Y!r~jkdkR^ zb0=yCf9gJMPO+oaD@{yEO9n1kWz>a2yvpvY%CV@mM=$+(Mc@%oRB2V73?pB!F}Uc@ z3eGs@35f4BQ3I)k+bmDWUP%ZeaF9cFdUZ8KeOxC?dA@^pKr9I-pF10&wn0AAggvN9 zhcaM>*ihzmzRc0GTdzyKwL(6DbC?T(mC>eJo;+Y{+}MhrwSe=bA$ZMj7SX@A)tjx~ z8OBW`@T`&H+Ii~CjM1QyR>16^O&7km+s6IUn{4FVEFX9;!@vE?n8pjnG4x>7S5%ND zkCrpxHJdrl`E7GFYsMqK7XhC4&YeoS{f!X^7fyEA`@60r_`u=a{AwAqf3a3_k_oDZ z%3HI4)u4wTnRy0#{@pAY_GL2Y(@FqE#qMNSzT#f&$-oGs+}!v7r)V*M+EhA93+(x7 zMyDU$fKeF??Cg_P`0=B!To`rf=(J%V9|$L}@HCm}tb2Urpzy;9EgfkQtVb^rlb7(J3-e9edaB1_i@CL4@w3jNGPYhO>p-qxl zlak%XP(7=nRAd7?52YNii_b2O<;HJsTa7Mz9nFW(5unoH3hr8>n633u>1?)=_yME| 
z1KeNb;SPKynvREVL{}|7%6nfZQVai$b#auCE}9H^k8Ia#;&_M8js>k8c5=f)!3NRW zc0X9u#=KpWGDgb>u7anXklGxEwFalP6Z@9PBPlJ77q>tdoYWM?cqdO72soyR2d=P# zT9f>LODm)(5m~Xe$;ob)>uu!E>6Jh+_Z9nzwF2-jzNf-{Z4v%~&({bi15jP<1w6ez zKYzUmjL&upeyB2XcuS8UD0Q%RVnVOW)8*z_dow%Aro(}rO4(Bx`26r)aW?ghEZ6kx zwF7~r;45ZOAm9@)E$Aj!TfvJl7_AGe@qdZ%E!;PLm}--Q+V2FL6u^Xv#Wg?EM|1u^ z0s|}h@F1G>VzBhM-Lc2#{>v2-JBx7F53sLIO%V#1A=e@hY781zK84BWp4t?J>!Jt0F;)xj_ee!*)TKAJAJNJi?ZD6_=^>~NS z%{ct-9z&AVyq!(7ZZFKuenS#8BlVYx507h1H=|j$+irZGW=K#nVy%crtv+-bYx+}t zhg%Y6`0R!ZT*F7y$omla>oxt#AHN0XhuZFqoX()fQ?kRM&)3E)9;caYZO9}JdGBp) zPeAy8YOQ-k>lg~euJ6O4~veSOxLKVYFu1uqRm z7Gh`O(}d<>?A4-x=TXU~{#`o&GGY3&PfiCv!W!&rnOBFxXc-zqo((U!*F~1(e64>B zz)pT&AV2+bsk#h2`sMxn${{3!m{VhySAp8- zMzr+r<7tE=Tsbr(H?wZn&i}q=sa#xT&}7)`G$N15y>yL%Z1GdDs&4CodQ1uAu+?cI z4h@4&RkhdUJn|uJ>^Ie54nbayEGz3gly=R7T8B5PT|XIb`&%G*mE41r;KCkX8w3+@ zM(iQo8`2;W#@D}*wE{oqxtTj=t>n%$r0GtQEFiAk*ugIi#lB*t)9HMEkEbYLgkqeY zV`JAiD)pFuqcj(>)9n%8)4w~}nrBydrl%3u_JhDRnw^OH1zPR>aYecDjjki5 z^IXP|hf&MNrhvsQR*jRbTPdXj$nl1I0MH`M=xIGI9LlJFm?T%Nqv<)#peQ5<|G7lT zb8Ju^PYo3!|KNu-C0F%iS8^aNeLcD2HTM~TsPZ%MKW-aC_x`=7?c?4T#@q3JsKMx@ z&_xD8h4+AoamPPF<>mor%~O0a!RI|Y3kp2Uf;^R<)IWP8-h_CHeg3r5$Z!n91s-X- zw3Lfz&6M?DuZm!&-r%mtz>OSFnB5)E^}eS*P23)|jYZGDyYl|p4;SEE75-}6vGT+N zD&^Xb7&Jj}wRB(9ZPwiT1Fp!C6q$kb^@MXguO9O+j3rq;v)@Ni?kO>XUI&b%dOsTR zX+oRDK~7ZV@#KXkAQueTeFot{1z2(lx)2CR@MKkdt)%59NM%m$juX_P?Sih{klE{xT-F3OdCaPk zktkW!dXQ^;1IwVU@@Uoq+9>#^D15u@JKuT3?(tyr>yjk&6Z8rwT2gM3rcl$19@Yv7 z2crLvF(C3>JLUXOKTNrhm=)7I$0KEyoaj&v8$`k*w!}?n?D4h_6a=I zJ*ta&+5d#9N3E<@Cvdl}vfR@x?f6ipRSR8fD0tHEeDrmlu@~)xSa8Yao^Q!b5Gn&?5?pLn`u_ zYkn3^Uo9hK?H}6mp+^8!0e;TF6OBz4>Ei!Luuns^GP;!gzeR@LT&fzMW%dS{r6s%I zz_Zzj8(N{^KoMJrOX2%N8R38(UyjlorZ}_*=BF71E~VWWGy49I4)TSVHP>ygd36Nn z5Php|{1jf{0%xZmuY%G5|JF6{vXJk!mrBoZZ3TMr+#5g)gZY!cdX|wJYVJg6rl)+0 zm8a)0X|C1Kkj_CuTcys=}DrX*K@V=X^^oMxqYGroX7SoW;pPMeKWs*L{p7U#;@|G#NI6+a?H#Xj_-zC--E#nHfWI^vP%{5|h;+IR)ROKZSoN3?hJhh4f|;wy6*4ddI&tk zOKA%SB(vyGCajF1A*0_JEEx0INADY8DaMp zK#`}luto{*r5`TbVSo8Q&b!feIaOZ+GZoLeFMn7s@YpXX^Dro!li4nT;*yFCUiPg$ zHokVcKebc+M)Q#ctWBjwtjEv-Ut`!dM7sg*2UO<#;d>OAOZdLoLDPh|W$q%_GhKcJ zywBhgK#rZg;-9#oKo#u)?eC~{>gwH#Coyo&!mvIcX30%E z=GBfFy!waS>sy;3cBXAei0hk;DR;`da5vC{;kJM2zW`ynfTIAWBnaQ zg*~fl@rE+t^YJj7Zx!Ne=0nP!3{@Lnii&zYEYB%ahOqgmaM-= zmY;m;-Nu7=uFnc*{!tDS`m@>z(MC;DKFTm@xQKz_=BA(Du>7tvTnvmUK4X z8_kn)>^yTU4az=8!=#HcipHgbt_)S3P77@zR%EAI5>6hz;3oFCJ_x?X^e;ax4{bEB zocwp2iga|Lb@}X7GHHrD>dpr1n94m~89WB|M+UAGAC2G68BV|6lU@_UlTgJ2_U#HN zN&6A2D3N3$>qGC*jo8E)5`2SrAzT!R+AQ@Pcb*3Q+lUSy*t}bIJL4Hz>sF#&mE2Wq zStvNNaEpuN=KV|8ql!Cda{VYOb2a=_km8&YqFqwypzlUykS?%ZyA(kN2+6<5iGQ0YcJ!eK98x^6en<3Ne90@B(R!~1NBBy z_q6&Gj)ydcnQ<2yBBMtXly^0r;+@|^9Wh^i@AR14tzn_-f=p%&=>L;{D8Df#zfRvX zuG9Xup0e?1oU&8fkVRcDftvokbDbx9ZcRRZC-xQA)`&!A6m^3=R6m(!)Et^YA6TI4!0*G@Nm)I2%Jj71 z)y@1x0nalBTT-NLg23x(S%(_+?E7X^s+M>-!hu96g;2QGWv(WKacFGLe(Hz8g6&)@+6!mgQHWH-K^;vIsE44rxyEg73 z9P`?t@0u$*Eoy4{BXGU1Ub~~*EuUEW+Q(t??$<-k7z>6ItY3>}WL_tRGKD1C6So^gXY_R|f><2WM}6pQX|d8x*YjJ?XY%wA^6r{MonGMi(+TpwnwPcXnH&XccUYzZH zBZmpw(>D5B4B9_shwG;WWX&P_PkG44nqO8%Phb8pOn9!OJqu$Ty(<2QMlN*MD0_{f zpfL+F8j~z*AZi_%YUD%bmlH&?%v5%BzUA9KBh_?6j31bpgFOI}r~%!~z_7$IXdpfcJIKM;cPi&E9uju5rz3t)nI#g?s)q?0o~zYS7tLU~ zVaZb*<)9Ti`15phw%A?qY}?{0iK51rWl?|Ipif@r%MWMDPwUTj`z>|x>a?4oW3SVx zOlLce_@FH1tP4(_5!Zy&LYfr14(K~Fu+6#py$w$hf^bQAM2mATj>)pT=s$=964%yE zlk!yMBgZc+*Pkn*NM-~(?18zJ#m<}dkCe!}tY-gRVQa;RfUYwAzKaXwNbT|zANWK| zLV35ex32nZFa7I~vryhq+`r(4gU-_GX$p||n(5~}fbTTTxG(kTw~Ga%x#FAv3cccb zXlZh?5fOmersF_k?bk;`$X^6huUs*Ib04Rp)2dGAYeVEAl|_5vYQt@s(9^=)OA9rI zi9+K(hl>Ulu5)w 
zxw>1YoNC)ulBABePtquxl5qUX4EcYK8t(Ym%NYt8f#Bng60h5n(Q`q@AxVi{5h_3| z@z2Z{<?g9heTDqLXGxy#k_7u{I;*#R<*J;LYfTWNCnHt}VH ztosQpz_$Mrr2zZ%>m)%19m*wIYQfOF3k44AhFBQu;gO}K2}ny&Er7|UFHNu87>XKM zA0!z5<1!5wNp7;<6!y|rtNqk1Kg&dAxLn-{Sw3-36Fj3vTF@nze;tGj1{xA>L%Huu zEVie~Pre>nz~wXUrml&=CNJR^qUBD=lSxD{YGd7rNR3#YNx6X=t@I#Zgz7gXBdAJ`&*txWC+XRm5Rkv%+TTYpuvkxV3BoJ8Ja% zJeOaHKR3Sz{?i1cCdvtT?}jmV3s^bdKwo!c;NE1^JL0greYK`Lb3ZqW+rc!$MKpMa zJFN_jLp%1IMSCq68jfi{L|1b34YqITkN5LU(1>F&VG{l zxWC$L+2Yb~u|kB154>fhYc6>nT1m_W&W}fIb#|-Fo50`Zs}1-n#-iKj&WxSBH~c35 z{kUz^n(cal`M)=6;i0QjW#@Q8b+9}B?oJJ`KyD|-~?gNP2t)> z;dZ9pn^s|JAw0w^nvd#Z>M?|f%*{dwf{Bl_K7Z}p`(ncBM@%p_`EKV@0cJB6{u|Xr&t8#d|52G_4auf`ot=gx z(<7Fj%X2*|dyo>JkV4m(lAp6c?1u4EQ$End|E(S~lo9dhV}~ZeyD`{tu6am#}6XPie*D_9Z8>0-(I!;e%J6$*708kJyFmQAt|CQ`bt{@308 zXIF>Ecz6HEz$=2DJQTXd4(Bqa;NHBli1!F>9y#s+EusnvMw{Go7FCHR@>Q~jUzSp$ zsmzbAV+5wP@O`59+x8UKB780W?%5(J!)lPJd5I;|yKgBg*V2Rwg;qS$p=87f6?G$B zP7cgP;rz)^BYvpKS*%q-hS_FhyV0Pxk!5 z57TNvpY)Q$Vc6z)Y#+2o-$5Crt~c)|5_lYz{$vbHs@NtJZf9N7Jy=wnpE@xvv_%F> zH;e&GI5wCe6qIB)umu!;-iea!(ubr+n_DVf5%D|nmKCWk7;e^Dy688UO)FDz9$ch5 zqPj4={HcEcNCa&M5B{4xfy07X0PM ziPJz5&3IyhXNqJGMIO|pS~ ztcD!k6%G6U2kDu5*)T`pkR`wf)AMv4Zm=A+YoodH*;>P#@PH(=o{aK@_zLzX{N_=k z8wNT;FmSs0Ig{QTS4o)M>+j_6G}>$u-?fUyJcVqoxz6N~Y0L}!Jilg8h1Uc*!3lul1E=oP*zzjxUL_L`Th@#%o$Igh~i*R9cq)SWO*U7#K3J?g8nzLiXn#9fntk zic@uG(f*GoXiR{qm$4+|t5aa%R3Hp@)Pu(~ZCeqrgdz z`@ol@Bb4?1+gX){`-Ryypko%yUJO>aLzqjg#$jo>akJK}IVyGdfV?N&L}i~F&PzKx zj|ZKfinCn(rM3^U$H$cC`%wy9U#4Mk0#52yrdBe-k!P={1rOe-7Z=lr0b7yt#8b8o zdU6k+tJ|Z&TF9t6*~3d&`D)hnNgtS(p+r9;+>B*P$j~pKB6(9@Y;tb$b1)F<4^ba3 zk=D;4MKT2YB)Zbz{1WjSw=QfE1h&#RJ}<1XKO6|(zi4?s%he5yc&_);0)D6K?{@Ap zrZ@B|ldi%|dT#lvWs;ofsP_^I5joL0^Uz>tCE7botOtmvR+$_vg8G*RQr-`5+m;>h zuE1xMj^~7L@`Adk9YS8;zyqOD2ua^+evqe#ACJ~e0x-K; zSVzTtMEHM@O^*!d!Tw{*Srgh(C7g1zmp%E%uW2Tm!qDhjDqKpXdtyWK_LmHKh6dxO z#II|DT5P_HIgjl;4}nQSw* z56Wdpp)#<{)cyPUYe!NmVmH4X|7MHzO_M)Zi9`Z9uR24J|LSs;DD_)a{HhllgGD#u zHD%8SKG^fBxx|t3y3>q+63R5S6*nvxSh>B&dEq4FjyGWZ5&eG3Hp zl`Avj^&y91I+jGuDWy7%D;M%&U=xr=rg(S2pH0=Wp8{slWdKfw# zdy%-p4T{~LRp|s=PFFsJH`U+>|bG4tA&*#C2%W$`23lV(0{2CR< z=>9Mi2YFs4ad+8`;HT6`tNM!Z|F7-C7Ig@~Sh<|;0R10utn;U&?8OQ$45hGtMv1?m z1gjE|)(Pqfc(G*DdNU;&u6LDm%I@P5|FMlqkjjZbt>?dHVJdaK7&4*&L6V(5?PZzU ze)DVKZ?Iu8IlpMs?h-I%2Y6@rTe+G%uVz3$ zbPpx8P^i&IM#y}r^+$KQr^lG-uFF=Z9)WMT!v}2}FTS=>n-ps!94HqPJ^?X12M^0} zT>&yv{v8#je1g~HqUe2+q(k0NDC+CfBRQGO(D>ft_fAk0C&p?qCx$X>9H~*3FI~4rh-$X6}W66+ExA1D*e^h4lx5f}R3szFBruhn-K9qVRZ+`mdt>h*r?2UUgyI%F!4%>CZ92h?er zhHa1=W#GtL=YHILx2SQlz|OXFp5TrfKBTg~d8sBDh7N|^dWOhbSjDibJ^omZqEbK17Yd(-8UC%8$*b%uuEh4IGFe!?>uC z++Zss)V5ta^vYpG7Aq*8a2~W`&5!&C@fIy^RZP8U-hGfm43x5A$dh9M7YYU6Yz=d? 
z=k`co{?(rk&$&A?MZMrW`VV4-yz?F+xLH7?X^N>AF8Vd&t_Um`fVUJKV}6 z*nGFQU=i3!)SeyqGaVaaYeaD9d4pz;#;;5>v?$nhgsB)b&`8ja2g7VDqD6Q$=ttVT zQhg6Oi}=yUVcx_c`z|kg*@bkHJdNUrla4xy{$9hUX3loM6;sE4o}>>kU#@hog(eA~ z{YJ8R6&EgEPS~oWGE9CE=vb>&D+np*>USOL1?CugjSo{f0$g?I)%v3|3NP%+Y_(w4 z@j!7AH99H5N3V}o5xv0bIjx*=MM>{5ippZ;v0fXe!5NB-$=JGucQ_n2_W)=#Chs&m z&72x>+pB#HEo-GYKs9n6?cu@q^)m47$UeMy@zSY-SUGZT2~^X3F?m^~duo)!9DDjr z|FwNX^vLk&%{JI&f9KHs?7NuKvqm94+3#92_GuI@D%gI=QFbD-CPd%I9K|6yDXtyVSozUKtQq`Ai~T?4+%c8-x#*NZ+Rz3d#QL%85s(cYTKOg#t@Qa>(qs0zwLAEj?`iTk}>3!8cdVA z=jBG%@{?t%=2;Fpt@!BbobUxzhDvlelPs3s8dU+{2HLbH;vKc z@Iv?3fAOU^2Wi~Mx=4!7&apJV4*UVDEOKC!*V}YT6^Wt57 z6aR%(TEsACQaP~iZyy@us-@TP3g_yNEX3Trf)4pcWy@}|fE_nJ)%U(^B;J$AmVajh z8!_bsnGBC;+;s~}ZJvvHwL4y2pr0$%BpeHNFfD^`uZ1Kg50zZ-(3yF9M-_1$5xC?EZN1aRSBNc|mzn&bO#vW1umu4cOV&LSKHy<|e*<>SS}M zXd^mc!C*K3V5O%NVz%%rg`v~MM@B|I%fR34-f7~7^;v0HOB|1S9)*+vq$+Ry{UN>6 zp;7+~QM*W844&dgt#cNzaO&8}TTR;j*K5tb#G7OkV|?}r>?(3ZI$s*nZ0$ejkyZQf zNPCg&$Jh`8YxBdzE!SY&W+6+POx$5F8YCbjKe*H)*>VRJX813^gU*ZtY(%QOfn#Z1 zkj^h_OUs1y+{3t^XgU=h{A&E+%a+V(&KH%iXPYY&)xg4tRq=tf5*wn==VODkd>>^!KhN&5 zp$6m1i%5|_*wVBq*B-=y?dBB#Lri}^YEQeD6#LWkdl0p9qS#=}*2D&xOYZs&xTotc z#%FjZ*Xkl_xO+HW{vHO;pnN9{U(4UWTe1ij*Oni6e zV%w%y4eh;YS))x7rvcDr-hpd$YOHT8>6KJh5`S*{O7eygJ%2^GX zm{OoL<54IX1KZ~9eN$pD&|!x)!$6@IERZB2(>%FT`bX1fG zyi8aha(}q$2DGDm=_4V8#+gFGp22W$XVv4^uqW?CiQ|vS(OQQD9%!_RJz(48dEPs*9#g)~eh|-or7#Ybuilf_e$y3Bgn9gn@EQ|zJD?&a}0Q{P%5h-qG~H! zLiH_UKawXMmx&Y>o@N*|>%|ZGM#S~wCrSZiwJop#JkJ>yF z^d%#`#%QP&!0$x{XxptWCer`5ZAh^NV6YzG&g?U*7HbH7?W^VUk)kYqI|5~dZ`S>X zR8E}b^@s>HUPG(F&UVYWy5?)CniY9D3@HuQiO#}A-^`zv{nYpOy0}DsQxEL@Q;0)c z>%dS~m8deXg%#Z@okncvHghwNOih%JR zb+IrSfBR)cxE1|Y{9gb@J{LWcg4hst^R+bf?qH@O{iT{oW`0B{`b&4(!uxmvvY^;k zcy0ETw!sq31EZyJ5(?%xNfKNCRC>McHABO*Q&xt67{fi$3>Q`XJLM#Jg7uWnblGwq zOlw9Sh-Zbq=x;l+if^O!dIU0ar>Y?x=IB!eyQ9e)nU81gwl}q`aQp{_|A3Pe&`aXI z{X|aH@+WfFs;>5A`fbED=l--{#MzRQnx)H>WT%@uX?EhD#iIU$q5vD@KS=-Hde9T4 z7u6?3qeg2Pa)?x<@5leod%dbT7mR0;QfKxc<@FD<-u3@MV6SfFEblON(k=UWROzd5 z+4alndKzAmn(R08U>r~BU7cm26fs2snz9*OS4SUF#jze znh7rTvB?Hh);36}L*osdB0$OWzQ@-pcjLQ=@~pK#({ZvDZ6SARi@@T2X6%&b@!{|1 znP;!Dzsae%5aC}A=xIvmZ$B*FYjuC!dr;Xy3)A$kAlj`Tr$b%gCYQ|+P>vxvwxrts zco*9)hcKK23pKSAoj}rK6n)|n-(I#yUSf6`_|)!>TZ<#hn9Qqn*xhjn%0dMOecHE6 zpxYt&uq2smyqr0Q<Ch{YUNW_?pyg-5RA0 z8bQ0mcu(MQ8ZovhJkI)ELteu=za>ecoxFXXt#m32v#rSB#aH?Y(c}hMoOUJP!zp@X z-*@32M7BGb$x3#IGRvM(*(+OS z>&(#M&dQ3jXR^u(bs~F(WJk`bD1@Z?{oda{pmFQ{dB0xI@o3zRl{6~vQR@EXgEnObZV-goTe@_?_%kj5@lU!Vum~ZafuE2p#zV3sAr(saK%3N zAT6DjzbJj^XUZ$t5!n&*@^ARVNc4&lSLufU&-Bg$e}&G=h1L0KoT$Y+q$MvePnD>R zLJc}=u?zGI8=V>oWgQBid-(MF4B>fB8C#!l22ic0H?=&8vFS)Im%-7??ZFd2+Iae| zjw5zX588(7%ttvL)ez-g5pOEX41Ej_@PY16&>!x$<&*jl3sbFPHoYIXv=t0Y1ws27 zgFojzT!Q9klqGY*W#_joSDz7p2XJ6=#N(*}dvrGV_MfSxSvL++%3Toh|KtO820V&S zatW?1dXRaXFqKPKd+Q6C(fGbynhZIZtI^)Jwg@`Qi853b068(WtiLlf8!5(@}d%;oJlOyGZSMoCMy$@n= zN={D$>W=C)rcS!sxM*bDT77q8013f7=fY%dY>QMy$oTkfvou^eQ@t6>^!kFERsTSB zy(l}4JCriZtTqlU8%q;=sRM!)PdRpY`WNvYxYG<=)AIYP&c302K2^Kl+A`Y)FDtg7 z{=NYw&quxwLfYNOMmwt?$6bjSwQ3y#tza?76^3DddRPWE)2`=R%!yoftO^we1w4}T z*)FI9aT=j5%gAd|$G7)(_}>5)H+x)s2s5bvXL?o80JH&jcdRYqS3Gn<%-*jUW&pRS z>+Bl2Zf8x?$WG`cf!rYV-#S!7r!6p7mOA@x!BChY@Yf5D@pc*Z#4r=?t5$i=`&2qAX1-%tZM==_W)!gLEk=02$H(;1v~7mJJ_zrg{qKd;S&NsOFPp9yuj=*YLJw^TrA z-Gcto)_LN4qtq8?XEE2l%FzS{mp)&^vYd6Q8#ezT2l2a21u=Y1q)Xz7Ccpu6`bAdZ zO6*%#jB}fKa`L*@nsY)ZY7xr=Qof86UCh0gE*<4`G0RC^PPb<~KFywalSh&hMic!p z@)bv8-(ax8h>i{SaS~r5R1M4vR2`R$^*$)0SIQvM!4V(xrQ53uP=k}rc&|RB1hpim z)6Bx9$?l_jT0jwkYJ)_}2D@7M2`WGgyI8!)hXDRyP##Ivz{uRyNvBCQtUOm-_+fRQ zPXCfT@Eq(Li803OYE-F(sdQDL=%&J@R%;5tgqK_sk|3P1l(`*Ox3lceV}%B(bo#)1 
z2a&h#;kK!s$yrSq{IufZK##ak;!f4OS=D#8G$~O(iOb$_6ZFtw@ck6}C(#3{G}a&N z#U_Q@Z?u=B5L54ZCPNj?>Sfvwr{(z?uMY{wNbcN-3zC?51TL+i+du0Y<{X-(l(1=z zMc1n%*E`-Ty>GZ|4u04^ICPS3>zKUmQRB-b8q>B?vh~=H?UgF{tIM_Gc@(K?D(bx;HlavQ^b#d6*2r(Wr_dX^$gXPm0OX>LND#Q4|NG0Ned z7gXHymh7w&^sem+c0Dr3@(w%cRP>Kiu}~c#F`O%#=bR`MU?;FT{;Xj) zf=ft2Lfrs<%AxU?lO=v75pu}@0Oa@ut(!epJ`wxQ;zW&c0?6zt!!Kn&d}ih0uDSW* zI}J)u7dMkghKxSq_qZ;meEPb1jh7%5G9U7Key;1?7T55cX{fu+5$V*U}AaS;PMYZGZOZ4 zv*BPS;$X{pUK!qd>&HSTv7ON6noEyG9+}ZeuF%j6wk>b6iu)y@%BK6-Dp_s+=SEdhRsnf6IXjHxCA6BZT9P@s^-gUK58 zkt>U)2pMz;d<*@I%Qsk*o$eC{e-uNsH}DkKWW>i_Q%@ZGqzaf#PVbtt z2Jvj5c+&fl#JSjiKR~I-y0U^pSOjJ3vZsPmN)2UIU=wc5xPj(#3_W-!xe0H@xB(!E zqievmGCa{#JkXwrA2kJSCWy&~L#j}6N~-N$lp$p!9rzHPx$>X8NPLQ8RvZI&o!};d z9h+^1;)uw#I{8%Fn$)s=EX`?A@qe;p6)ugM8r|3og2|F0iB3FAQ9n)nJ<-kZXOlSq>eIKQ&Q;eLq~D z=&ugY^8s&HID;0Z^0l4$@c)(6GmqANiT#y^C`is*x!D z3`PNJ+8wX&e7!Tno>B5Ul6AjYc>f1w7bSp~D}$4I=l`swC?|H|?$m^kuo@12D$@~6 zK5&U)Gap?a{I?DMyJob8{E4j+-STf)zg}Z_{T^wP(k5O2bfrz$Ze6&6 zD>^7`Z8Rt^MPx2rNp+#XRr3m@8lu5x9iyV!rSapojVkYRk?>10?<4&{JEMQDvY*?_ zd;X6Dd+&x-Z!f1xTPMA?&TSh53ZsF=hn%Wovn=P-gJ6j%2aeM30&OIE{~OQhsPgs4 zcGSC3R@oWfe;g}P`O{~4G|XVWiP?PApcJ=n`wM$ z)MpP$Hx0O+K~0~VE%j$oR>ZPc(01P$d|LKH0z0 z;=+d{n*4Ph42@>cZ|i{{{spcWTm;Za&JZG>$XD^0wW)l~uxLsMiXT-6OIz|jrn5B| zzH?*dLUmQ<_0pi-2K^Z%MZ@;nw5t3TJ)=u*Ihb@hK>cYNtPnZ%e6(kGx+E!%+uaJc^J`QM z^oAy#v)b{k4b-<#Pd0ATXo(ZuDY&d7UIC1v!D8Lm^0kHVKd5#iOjGfCxMV{e(eFeF_gdc{0D3qd z_}&b3dTZ0~{MW~RxX9h5N|((PXw3@ve-0O!sjwllte^ z6KGtEE9TIbn_A*^6jy@49Nzp?dv=~Zm+sTs+%RR0V$>qos2^?u+ZWnkrIi??-RF4M z(*LSDmnh3ej0^5uSCNFH{ur&J)seXJ?km=n_09_EPcyG8)D2#sj!-+SHSg`sP%lnZKIm+uM zWh+cvs%UCkY!nM1{<8|$A^#6L19|2R1%J_nt4Mi&d@nEedVJQA4SZ^ar`987Pp^4w zKFFN8_eo^{?Z$Q^?C?+tX5$b!;FQ)L{DX1V#y>iH4r#gX$GbrW%ym3A&Hrnw$BtED zZ$ba4alL^RLZBJTnoa8_GI(5gb8qSMAC1&6oI6Zf9``%IB` zIP}7@mOTkqAXcfYvzPnDpO#+WCzHz`e%R<~~V97UXHH@4N49(U>Nfm>_jQ zA@sAsW3&E|qg5ng%Icijki=XcCK?Y=Pn~J^7HQGsJ$4g80txMUhI~i;E6q_=) zss1*Dc(|9s{w$iXUT~qc;a`qXd~yCy)bQYq&}^R4dFL~*M6VUa`cUO%Jx9O zTP%KAq9Gx;TV)K0zxl3wYE){aa9&seW$Ov?;h0tsmTg9=WjQb?TV>no#pA3DcIIQ< zH1O2AWV8QlXXtWIox7F97KPs`FXr9=#yMsIqIm9r0jOBve^A+=nKVzo;G=RoyJqfR z@2wM$GnWxqhQLHFE&r7HH(Y14f~RG^t;{hvtP4}mt5Cy8Ot0_?vCIUv{$YislG0bn zs)x(}E1fCm0!4v(Hrd9Dm$r}&)Tn6n< z=N=WR!N3v6>3wg<#iH_!hE3VGe^*hImsTK^cHLgiPEQY-3+bDv`CGp=@K&FI1Uu-O zrQza^&yT_wQ*$a69J@K5#y$wA^DQu=eLUNf?l9cs%Z%XvsZt><`q_|i8~a8U)B*7f zVZ!$HW@Ne%h?l{NW*Dc-g2I(73Fb?5p)LlLSU1~UCgqEX#=I@0I*TH_mO8mjH?&fSbhbGqUY5(1Kn#+7AW$VL(@^X!^%0`J!`rDdMpDhJ+^GJxHs zzxRT}Ad0Ly`i1Gi*5l{A18DG`>5mo^*4%aFV#>Oe<8KY@TL?H7aud4QQg4edEUi$S zz`^(=r0A@o@vfJiUK}|mx)QW00efFyAfFzRt^kQ9ak6xR(qZfQ6ods@*G~1=3lB6O`t`m@zq>u20*2QL{}6CV3;}Z1|nND$bc7 zgVrz8o=hE(nD`QS5`0E#!EZBom52#)c!3$^EnsbeQh*^f#t}Cf-n4h| zXS6m}i=4fD{D{_&_rIP|f`H9p@POpr`a7zd}{u;6>s7@Qq0&2`9(Gjvs&njf7^YRfv~C#tA|_wgD#K1rJlH zDRrs`T+VibSggWbkeR++(T8sQrmt(Nv+8eD7?c4Lf(9}@D18C1B4%7kf;4^?drGe- zwim|FZn2T`&y4T&3YNuyNs*1rY`C6RyWT9krWe>557FYDAb#l&6&{1LN!Gajn`b#> z$i%JZ>Ds8OdQjEVUSVPKK^)}V6E?r^ePE#1^3dFRpCE#>W<)xg(G=&Y9um_l(E?qS zJk6o&jpk=qv*fXiLS@(X)(b4Zo)X1HCUA&izDt(6Hw*Z<6Fbw*HK#~BS*Q{7sN zB2$%a6$xy@9&?7k6b)?AjOBN&f^viHkXMjAph}wCY)FA*6VQR&y*o9_ewEp~ldlb= zXv+gjRQZYufJ^_b`itCYRn7#At9UVqsj<7&S|OO*)bx6Wjm~?AUOtv~tqo2XZTC(@ z0UNjT*vZ?97oNA>StwQ>eZbF=6reT7>xPk3Cu;`!thGE3bzAabiEn^__R6Qoi|3c| z!5-F~B*Ajn+UD9#>m?An*L3`F{3(6@raq;Z^UQV0vpRj^Agu4kS6Ax7o6PYn+L%6z zKtnrq|8oS1;ZL!@$+~z$pRU0Zi*63T7k|6SysiV60I_ajM37!yiZHA)3LRG68NnL2 zxjPcHZafCtpOeda9AfwYOPrNca<s#PWE2~nX2{>Ue#?Q32#FTDsS z{<-rOmZN8MKT@jrv0zDAWJjq|>L+OxJYpV zQ7(=u#;U-A2#rQV+`aY5L!wpcjE0sCI}x? 
z%Zk{SP<**gDkpHVBBSZ;j{XX%_nYmunS&M#IC0;iz{d7T+R@!xul`y4Ih}uPrh2Rg zmZ!Yzvu%C1It&D)44S$Amh{7z6{cX<*Z#D9pz`dMQTje|wMHbnM)CkF&&O4l zDQ&tsSk0I?1c3smeZ`~Z)B;A+7PzB?@Nz7c8P%>EKlitKEq==R7B4?=*CW5{B^cxs zY#*CGvo<|1k0Hhk4;zm8K-mV5avU!X4#PMn`Dj?l)m<2{lewlN8u`sIqF+i~(8ty} z=&sZ^_F!o2UNU6=mTL}6oQjT7XT5HGhi2sTBFoYJ6v!~5`~$W(Y~SR4eRjkx6Ebtv za-8@vunWADGx(7;hvX3&wtC{_bcH{2PRkXP>K^Ix=>!MzCT?#T*}pB@;)O*ukeJvA zO_$kz2#N1kEJo+JqT?Kw@8&_z)SEv+6nCwtyNY9cN>@%7hMUovz3C8p9c*H}`s}KX zlw!^D^GoI+NWJ}sd~48#xrv2x*W$A6Kn z{|F=#?3R<$swu=%%OZcG+P^dyi3XUA{BZl|^91a%)tyzP#M(7g+9%$WRp7g!(C50w zAXm4|yZ8hAG;W3NXB~pQq@);$<7$3Izn<`_)`0~5``-8@hAdEI51$qZpg!8H>v3(R ztA*1TX}fz;dI6YnC9$5iC)cw7WX4_gFi}Rr;ij^+B8F7l$k+dYd`TAP|_A zGCX?XEu+E(Gc<7@eP7_5pol#!LwLv8${hGs!Q8a+nTPjdOyEEe2G=rHK+M3Nc5Wkd zol);`ow2j~ zhp~MX5midR3KUQc+XraB=O!gKGy^_;X7(}u+M=hn!)D0JTN~C+W`yg_f0|^Lt3l?JVP^&Y#Z7#D0o(U6 z**nxYh<UZsux_^C$XPkac>_LdftepiheMkHmxl_x;HeLi<_Uh)r(+=2yYeZjt_h zh8uLaGr}H(mz>1KM2gV#lVHphyXIyTV0DD1lynhYkNjtAXk}%}y`QbwT6T+U{)9D` z-xTZd=3XrNIg%RoSF=Vu)!_VWSz>d+wl7$$s#5KJ-`vab-4==LyMW<6IoE?+^P06| z|ELNIaCz?y*r-A>MQW$MZvFLPS2wI&FoGWw4O?m-UK+%mF+aLC`{I!^;0V0*|(;!at zj>^>p^mCTztNk|WnUI`=IElo-ZuE(8TEki2HSgZajjO)z?C@^PPUE!Ut#cw4W;Lz4 z{qOW!5)09-1ZZiF(st58-6twZJKBFvXZN5gSMDM+D2IlW^wUvT8mEm;5g9Zs zc18lC@bL9(xTQs5Q|&EPe(xtkoI8wcDC%D?gumL+DxfmU)f4OP044P+TaD9Osm2k z{U>CKz?SyhH&rC>?{S!DMoVlAoxyrGdXHO`*#MlTXS;b^jJd&DrqFU@p9fefQP@8c zkbvn>d>`*ulc1bZVa_kA0>EUI5q{&7d>@>E>oN{Q?i?Ti%;cxF%;Kxb8R$!QxO0mc zO`W2w{J@4lv&A=j7Ija4@=X*J>-=e7m@+0lovlI*og-0$2x>*WIR3^4r*5gm{YfQa zw*KPe)EyF&x$=gyEYt-jvMK*l9eeHrndLhab#8q(&9xoq@_niO4x+`_8B2^VMSiYG z`ZvkL8kd8D?MT4J6fY;pDZi;#tYM^%Y+X6)!{H@JFd z?~ZP6mkwMx*_fJg;V!880Wah{ATfMSIGd>$tT_0aXvXEWkVr)e=!2`IdF&lxBh20q za7j}P`JYQ=x|Egr^$T2q^hLju@vc;w{Y`J*i|R&o5q-=+om?`_4dB;KUH2RHp5%E~ z*GMZ>qo)W8J0lH6p#E?0=htnLT?gzL3p-qJXI}pDORNmP+UnB^OP3^`-N3b7x(=~q z1}|vTcRlcR6}HHEwhePTPNI19i;E{ixs0QTif|pp>GCO3f>n(7?8iY~zW=7peA&-( zRlN9wn_AzWU4b&TgV!S1<@&+1oGs=@?=2U-_LxG|3&mcwz$L|;4%~V)rY|a7B53}J zKj*TUo%=GCi|$Eg0Elhe1(_)+3KHS%GmuocbyIubJO#W?y9I*oOFh9#{?9dr)zqAo z#~4rXIUIk3M#JOBTaHk;j8CAQbzHW&$Hd~w=^9zBFXJ?40&*o@w}lYCzTiBpJrk@; zJN?VWDdquG80XM&LoOI4G=Zt8C*rw-v?eQP0tff;ejNKh8`~D z*Mv@!oOh-4y4mBi;K|wQIreH#RC-PK zcOLYF%G6=lvRZIdIoqFdhxi%=Oad1=8w{S@-of_Zd@^n5pD0{ID~7x+%!G-()ei|a zYgFf9M$J_Bo_IR0j76XKE0{iW;~P^mph6rx{GNGG_Rq*7FS9oa z)aAqP*N;#h@0&Vu(Pr{kPBU^6SM=f=h_zDJ`m+eeO3$Uqt)TEH%tNcP9S(DQR7~RqibW?041LX@yir2)vw&j z2>&Fcd__$MW6sA9UKLmUoM!hh4-chJ+wd;aL$Bjxm`+unkP)QkdQ=df7^+gX|6z@V zQPAKRM37&ja8ht>UzD0h>C}6_b1sC8b^pIJma#it2yNMS?_zFbiP}+?)ZbS(<(Lzw z%JhJcUX!2g%vViAXglEYpVg>ZPb^4{P=YAM^-lCEP7H^tm5y>3IU0E9DSuQ*jO!TR zTGw;EA?BjJ_}zm`5#b~{+U_+g+?!{11tm3lQN(?}Z7kEN;(|C>auwsSv z>V*!{nC2Ii<73?&i}KekP?v^tgH_WJW$U}8u-g--D|-h089)$B^_Uw?<5f|AB3@N1 z2G?nVVCZzMOlU38>DFQMpPCNURI!x6Nl&q~E1WRSR`(r@hh_BJQ%e;V>{IFR4Dxaq zIBZS%3*1(xLMsiCMM`#;q~gJO*09p?&Uw5MGZj9_zT#Yvzzb>-Ppqa zm7VzV_;h+LNQS%AZcyHI;E%f+R%z0Vnxiz{`w5!3h5gaqKm)_xAjXvi0)DKYxF%OG)hPMZtkW8m&M$T{NzFWQdS_37E z9JMR2-Thl&-Yfd^K6*4h^4kVPCCkF#p?)or8RF^1H+i-+2TIw$Y{I%Wm{AgNJTX#s zubS3DtSQRMUJr7F-F+MO;qlA9zEdx&J}UZYr)4gK6EXbz%i+b2_}aA z_GQ73KmD$iy4wQ_%`Ya1Q}3Yb%{s5rljw&47D|r3Mv>Rm0EU*BQ3YOiubUyy3`ooc zY?l~oj$oeFF2!n2_*k&3vX684^-^|mj3Q)2EV^@L1=M+eLj|)y`id2oS9wN*zWCI! 
zwH_vXmg8Xy`HtllZ=|DtpE@(CsTO;c)}M9w9UP7o0CzRP1)VWo)j90J?<{P@h1$VAW@;Gc#}sOF+g%IBQR|nhJB6QdBGV|(hL!|hVamC}U)5zcFH@(* zhs*Pwk@#e2+-s)$goH8CL#mbxU%8O;XlQ=z&BN;lffSn7Uq=en#8N-8{UsL~xA`Z~$Y`mn6h142Ay(spb9;3JumMGgFSC z1Oam4D$B_fGIk#rsntLi!B98cLD*@z3Y^dMM7v66eKS_2nX;K44Y$~S5zvW~PMek4 z=r^c>tehx`Al_Q@K-P0EhhQOv2CAp$GoA&Q?|EM!d<)7EarElp%&X6o)Q&;%0YL$5 zWF_gTh&Q5(eH%3JYp)B||Q(`53=$j7Gai3_QU ziA{Zr6U7irX zQTub|>jP_#1#Ca~Y&krKv9}6#_pR)iMATcURN75sImFIS_G|l|)?2Qfogsz#wj5=K zNR{KSHv||;=^8Js3JYJj$kn3xo$xFz(AL|eOudtYVAGXrp|sHA-p09Kpln;MRw|?eb}d$6 zsG(Uxl@Z*jVQhQa9=-~D3>!ftjnoE5{~?*^R>U8$r`G)Y>Y6&)Uwb)ZB71aRIoLT| z#A#458M>cN@9z65+n>abx2}!6`o76P*Ex|Ibq6?@T_$XEpWMMy{|RQfN1r}IBZlPD z@!+o_-t`XBc-Gb`*`>$tG>tbEHL8_;{{q{8K%%ct7*Vs^3s-bekGl|HL$9H zOZq4kSio4qiSZqEX7I*UX!>3NSDD1{0DrkY(|{7pg8u49i*1+puY%g^?@Poz1N7WE z$*cs93t#U4%Y=D!iM{GL)!(;vNh=DL`a$5}+sEHeo-nS{d@+T_X-Q!Y66tb0Qu1w|_)0mP9Xnt<^u?)*03(Kwoo_d+A1h zHpiD+1?PRB##6)DE4Pl4haXsOs(q+pR1$Py!yi+`!Ki7$CQlB_P0X)4S2XyH@>Nu= zQNf^|Cw}18zN-F3amq5T1U^+2Yy`rOR6%a>hF$H8z0kn0z$i^pa4`cbslA zs3~Q4Zi0hQNYA@5ZU#FQ7yX}6ul7yy1uE{#?lrk#a|u4-q!6Kr6m z57_J9oaGWl2%SIMJf}>~wdQ&$9W$Ty<~nq40i$rUAC7-oo7OpY-8Vc5Q(>crAcX+~ z!F0Vdx4eU~(`}oZ&l=Kl%xJ$hp^DMWQVOvL{k7A>bOpo$I?ERDbW=>~ z9+Drm(cSy&P3%vuu{za8?guMBj6xNio(1pepd)lSHoi=cmv&~OG75UrcvZgc(YnIH zzbx}Iw`@Cm)Il(k4E%pr9T)`~suWCpQu4WLbp6={LDw!@L;8n`2nwIQ^{eHm_b7Ka zB%w6M3l`vPO-}jnKzFKe>TCR5v?Y#*<35;aByIk^PY>^%dW-nNq95cVmgn@#nHUWO z{tNnPmn5tiVVX6;DimRG70|!9Q;1>M0}>c+Jw|K1U}vRCzn&Fbac)KF=pzFKHVe;R zuyA6%+6o9xo~P{FFP9$SVS5iZ@J`)SEbqjdGCTb$PPzqL%sZo5^s!RT{_*$P2 zl)80!tA&j{m__}&LsuYA!)yXr9_YwwXX*Yq77|Un?!7RP0&Rm2P} z`6evNMhWkKe$L)ZOOLH!8GH-#dP6*;22ETyH}zd@cl>sJMID<;nT4#BVAoY2yyxah zjL4TBv;KLZR4rg1Wx9v~G_NS(_!LHC>Z)5a+GnP5%rPnC^Q0TVNAjeu0 z9m8kbsSw)p((xONiukEh)%|O&^3;tg679?Rq6^y0s85us5}hw6w&&37_*JldoWZa= zO0~n7VjhW47Z(k`$jvIzR~42A;nypgeRKDkzPI4roGcmDvXk{>QWnxpH?XJ8_6k;P znGPx)b?#-)%+M6mZ~n9&K!aICB8llr+>bvqG;n!C!eI%g zic=<3bZIts#&t-1x4*_xNJdKM7BSm(6wsa>;G*Uf^O&UsA+bRQ z^o=^MZqEw=rsqVF&Ruu~B9UWT-^pXMVkTM}i_ig(sK!j3TAGzHUnA zG#S>*){BRVEY9q1Vb6q3<_5@oZc`H0hh%aW0GdVr2lX>f?sE#a^&M}R z(?xj1rFi=cp;tuZ2AYki&o~ZMw`_0L?5Z>-$SPbw+R`9+KqDO@U%?!Xs)FcrJ4X7f zf#2spH&6jYeZM_7lM^ChLAgR@HQiLwsPy`w$_+> zaQ)wfpaW22);Qh$rT(IXZaPZ!LUUH2z+Es0Y0=z;b)hdA*)^FgE-g*La)XyHHyI3d zV(!rS$u;g+evY_qWH2AM=lBQsulYf*Ty00ERJDULa*|l6u0%>09JUE!fYJEyphn+FH#XgWan_3-3bJtq^L*k( z)qsf`wH8Xh<|wx}8Q7;OmClp|PFTER>gjUmdIC$U^4iM;4rbVv+4aWkoiDJx1Q**c z5|GpS{kS3w4mF8q|FmDB*Q8aJY~PPHF*SnHy|u<$+4R0?ye^NzhhDG|sKF1R(}frv z|9%5~UB|*pJ%|yTlvWvVG0TJ$SVeINJ~_rZ+W^{EpG#xBnPqNzuqzT23%%f78T~wp zPKEKkm;T;)S?u;Ii!1vw17Ywk*hx8`GCa$tj-szZo64^7QssMN#S%f^sPfTmC28Tu zKD&D$BxAKpGGx+Gt~X_l3Yx$v95~<{MV%T&6hCrlWt?)bPm^yWd(0?~1LRO4>kEZJ zJqn4ReXyZ+^1%g^Y9aegfQHx@VHc|MBzL2uB|s<(zqW%iC(qN79a)tZox(ZGyzzBC6@ z45=G9e^17B^mI2`ogKFaUrptGG&yjD+@JHC*k4s)G5vX!50h4adf)>EWek%O%z2m0 z{=6>3pNV&uo7XEj>#eD;0@mpq9l?%~5gV!cH}BUNIw;bfr0UTDyOV30CoDRPPc!28 zRIo}$pebP$V*m3-3VIYwQcwJOL`N(g_FR)K#V0I#;AASUB&*9W|IpX4Bz!x!e!jn3 z7#LVxiA<}QLgd8LlSY*}k`tmG_)2G#e{=_jjtO`-=2ZBlBVH-Cdx1Kd82$4i5wm)% z>W_rOiwm*I)>$}>7&S(Ku--kP)WB4oKPF#%CyZd(+e0a0Q5^m*U1XsMm99!zWjFUH z;P%pg)`GL~W=C>Bj@GJtp9Ogf%aW~+&cNg0s7qs9r(cOW#PVtF3^^%06(4@;BcgE|i_-9PWhdfM zf!+o;cIC_qA5Gu{Q_k*Z^Y}7HrCl!&?ow~t1Tr<5GQ=@^uxzbUOO_P;1&kpLrui-G z(J`V%r|v%BceL2a8`_ zd#W~)2!{P9{?oMDj3!zxtk)raPU{5+%_q}^kUQFGLEkk4%QWDQeYU@gx133w>E&tc z4Myr>;mSq@q0|%W*lt7taE`F{U}0}msRb@2^vKa?sVW0sv8*;2`ri#d%ReRIoX0?^ z|BXBgUG~0mBWs0>rnO9H=%XkOGrM+mR;WFwU;I*vDaJ2AGT_*4kO3M;5Ce6ka}lTJ zch*ZUORd>$vFbjBN`mXX~9+~C08BBssBOa6XN-@?#zv%q1>Vl~*5P8_bEVEYm*+PtF 
z^S0cHhUVwD5-Zblwbx7-`R>THlNr*31xXTdXCLhi5tq6{>tNABS}9kBp}b$5?$oLQ zY0Kf_wxqO~_Pg70cwg1Re~=yIIQ}5X2c3$Ruf?yC&(xj5d^Ncm69(*Zs#3#AoNfWn zGIz07XzegL1ow9Yp}pC+Pf}kPXU#BLZNCYvu6<1w@KtfuXrw>6+Sv;P*5{< zpDpJ`nWLGS%R%ztygN|oZ)g!Z_2m@2579c_cZC)6o>m`yH|+Stg$%2Z&fanEpWh@} z{f;&B#1ve8<02bzEmeF)2HsYEENgxyr1A(meu$wrQ6sHZgILoGr?8UWXst({uW1^~ z@HY?Y!wc#4Q$zx&8Os}x$FFEJ@2B-lIdw`Mf*`G)zrVIgqIKW1uiN1c*9O)+JJ>+P zmoZ>G5lX`J7HeFU?La*_bAuuI(t^LKm{mQI=^xaVwvd*RZ7f*vH87KDEk;my{Tl$$oKJ^?b8h8;yqk+R$ruj2+l^6R)#0nZYtB>L@u8`%>2%F z8f`pac|hcrgc3Flkko|OS9+!8pIP9C?A_{A9xrAAbco;1b)Q6b!jNLOxe|*$Dh$lc zTP^!LC43H;&X_3KIl68QpP}%^xNNYrr+s{cOHb;!W2gVR5>?nw;VYN-X-Qz$gpG{8 z4VXTZo&=POmix>7S@JDNE=jQz_^pW4e z?t97~y3j*J8InfYc&BD)Tr@Np-v3H@iFzEuT+F*4TaioUTOz|(2M=t4s95ZUckJcI zvkC3uRU?rTP-Ub&m+6NQmhhBe+N$hMPm3{Xr6(M^(Cbt4<>w!7tcVxvW$;&>mLgPY zZ{aonKz%~x4~7AafPItAbt0NRzP#S9&13Ybd8>&QG)6gc>4W%v2`)M#gIp@_&4tdN z0l}V1)2=PX zCPIdyJ<6Dgx_Y%ZnCgMB zyPHjuB^!l{-5O;qYr9IRj?obN_{cg8dCwI<}#Z{zSWW>geM82h>1d*KvR8E4_0h zV(~$Ax1-EphZW-uK<>p|kVK1X2Q=O?@(yUNoCf=;>Q1>E=}1D|lo*#?7|G6Nec=s^Y`MRvwfzLHli( zV#)TcE7U&=|1HSY1tEbT^8BOe#P?>EuUffATxN(wsOF|Z##io4>&Zx^%g&CE^^K6S zs3Vq-AeTgij1`sWf|5rhtZdf#IYkw417gEwj=uFgrDGI64)5!vJ5Xa|dyCjc5uZDH zmx4yz9c#3|6?p}7Ab^(iPQmi@vWt4?6zT-_oQo9Sb;9Nduvh7O-%VEuUtAvYA@ZLU zBi-pU>OH&!ua}7lXr}N6GJg)R-_@ed`1svh)+RHS)`tZzZPEPY3)3dD>;gu}9EL=0 ze3FAgZqK>NYL&Bn^^*N;T`&+5+0^2vi|@Lta2vz7@@U{zTZmw%c>8|~uS1W}{6wwj zfK+U%053$LdNnFXe>weJnyW7Dwfj~PquDE=tFbB1W!wE^U@YxkKw7#qAU^jLvuqJQ z5&d1MyKyldbj*@AVLj+aYaJCQO?N0vyJ|4DK^qgv)JB6{mq0mjDf>RckAq6s04CbV znYK@;9ME0hKYzmb`)VQhYIbR9X63xp!ZY^MBA#|B&!7{_f&EAiwu;!fN37l_&9HIi zDGPW|6`k;ie`l1;_1ygCf~IhqG*M(|rbjznFAF*)lT8Yi6x31uV8bWM1 z{568yRTAv}0d|iW3f6n;s@4DvWf8<1N(xrUznW~94mJk4C2sCXALj%wN2E8@DY4)Q z);DF~r(`=}JF=?mB#U0i_Fl;^QYRx^9Z2wwQmHBRaX0?;SwszVG7EicyQT)mvcEpRaSpx zJk=55gRB)DDTAD%5BpR?6%qb=33jdX#gl!VOtpm;GBBH^1jeuZs#aYOvrB@Y$v&M* zC}}Z6#>kl5Q(WO8|3LKa=OVC2Z}hb5`RMb?G~0@o>zlpYhk3w6J1DRi<4x_!dxc$K zSaMATUZsH_guc(i0*Mc;x;qs@Z}x0}W(4j{Sq&k0fT2gU-fCLv9*0OwaJm+mk;d9c zdQU)9oj^<<8}&?kdiFUxkeKAuwI)e0q$+U-{d!aRSP@Y%GtUjwDut$Y+soSvEGinV zt0&xyj$OYsn|uodMjGR8_4Zfmj|}#_u+M)>L4F5ySfZucLz`7O{bWm*V6}9*TPnQ= zy@~_j3ZDhz=6Y+^CMJB84z%mz&VkfDuIW|Oph%BsSjFpdygB8&k?OP>Nb|@aWjg!P zHIM^H7)nT`Cvnt(JvAsk^uev1gFsEM7j`0?JQ7&l7nfk}EmP3~7m$m{al7@plL;wl z{a{WiazEJGo%>|e^RV-OpE<;RGK*pSsQQ?lP{iusa9l;+qS8P;pp`fK*!>C?fvXAn zfPz{~Q$@;g-Q|P5x+|7-;hlP7*paycv*Qwb(k~z-AAe!ykkz7TnoZN=*#W6k)#h-( zeP9?KbK|P~sua`=Hr`En2wXL}QT4OsGsrmLzL%C6yVg!({F2_}!o_Sm9JhI7E?A0P zjG$nJ)l{Jbr1kXdou}{N+Ew$#OE7T9eah)gcE2DM!!a-uQiYr9{;|iJsW++{{@NV z#b690FsV--4n*K3_c;j*1sBE6AX$cA+yNomLH>!|B_cxi*rrc#=9WC;0*t1A&_jh; z(B}|fmL#7?fS{AGy{h}ixD5PDXh7{Q?Dr3Hp*}Pr$p4Xjq#g6k1?d{fUhz^Z5vup+ z4vpxxHa=5D(OkW#`@IO+aT2?{TCO#Ic2u-9I=NX$4R+=`R;p4@Jt#G-hEpOL(=9VM zYZegwg~;>zF|QhR2E8HujL1G+)^%mT!}SN40UZ6Xy+LQ81-*+))#O8h0n~e5XrD_n zCWrm*7prdq{GUk6(SSFsxQ0;}NY6FUd2VB-7Q%7y+p^^#m4F@pUW76Xm{HVaesh>QdcF0qO526n;8O zh4@%2gK@U%_SUD4j&C$kB`YU2NsOLiBiXk(kCh2#igS5&u`~n+pX+xI=8fqDuc9}73X1vPr_Lih$L~B z{UTJDcYW2_PU*ENTIhg#lQf8uBH(DTX%jnXhNQdcjG?_kD-=wQWT`L9w(6Q`v?F(j z^6E<oC1=o6#TDwLMu3ADXIWi2~XB_XBgL^lb{y zq`8V;cEDLbx}QL_$R-e=yg52%GR3aEep;rsNYQ^V z2>9(20&*UZgE>t)_RHst&;d}-rHf1dA!<%ub;o5Co{6}Uk`qk_vJ&I=b|NH^-nCMe zA~KPx#0dQ6{9^9$7qPmNd`fqmOc&$8tSg%_aSn^S?fWKOb*j2h{p;y%SypPL_-j=n zH}W>j&ShO2vt`YXCm)GujI`+tf-HR#~UT>J<9&zBIzSwSTX zc!ZCuB$eBaxqS(Uh}?wK%XyKzd!^Z=9L|10!qN&wp7$0evu`-?2FshVT=NX0Hk7r| zC3xKEpG6O!_Jf=>d$l_(vqo6Sx5>J|W}5s=Ro_qeiGqcyW2Cb$vdZNLh-88{+#Rs> zK?Xj_o(CI7*1Vf}#@FIFl@a%_NR}rd3R32B8+{Tx-4IidaO^(wfe9wD-ovz{T*Y7y zblTC*>bZEND|(z;8CxB@&0z{1(e)ebcPPvZDF>>KTRx(dFmku#0R^u!bh`fd{ot9R 
zh%I17n>c_?(!?6%)u>ABgn7U_SpLCxU0SFE@rqA)cp!s2tM4VW!R@NC2d}1Xps#WvMl|9@&4|B>NcZS?ryeTOCUxcLksPrFs0F;0G)WB;-f=YiWfCx=&;q08ElBr>a4#`7mr zLqG0o+-dflk1~ZyZ547OA}Ux5c0a$rNKaEvH-S_5Jp{BxJ6he#98)ARkL4Q7YdnIC z+nRds-}y``w`flF6R}YKNk+VJxEM6zD(A?RnURsI{Qk)&BHUMlCB!r6*V_y^)-q~Z zHlSQC72CQBX0Lv*wH~aOG)!&ZD#(N$R#73t#k4%q~9PzL7tLMy}9f-*j(UfAmpOvO@?NRXaB zA>ZK1Y82R&96;piwLURbvN}8dNL@{%Nz7rWnMR|sKxWg+8s%e*r~z^1zgwlBjpl=T zm&gdyI8O(whQNV!%N7#<%sRS1934Ppqb7a$-Nx>aINma?NR*64V{Xd8Ub^*02lQ{f zyDy)DtHi6p&K}h|ERN}UA8pQE-)v36A6TjZ@MWNQoE=_#@pQAazsQ><%noSPD>d{6 z=dT|U?NU@D3%Ptn!k3~nAzQoO%(HJi6kq2esm&*u>do%?4E3IYuD7xJzr4(8E1o-I z;LX$f+bNWe?_;Fkc4Ew0b$I_?YYZ~sV#riq#(_Xo#>k4RUq64j!*AGJxt90xK5pEd zft{%nHX#gMzvb318?y_7+fU#`k~SO8FKB+OLI=uJvHY${NzK0=$`HQ8CIi`bNDzh=nW{t6Hc+Sl%KT-^WCdp92z?~m8tFAaup;1 z{tkON={orjI?H(wy?OFCm%FhMeWX*Uh-DaoZg;1*&FMaAg%#i-GyHV z-zeKO2Zkbp>>Z9|Hm(o9(9vyxvIh;_h@p6i#K%8cmpOXZ8sb3zE48#EQv>z<*YCHv zZr*|k9bgL2?EWLpo~Ic0oLf2Na`qPd1D-(tH1Z7M4~==f#HgU*gWUf@X0)qM*6A?Q z;sCkRc$BIC*9vd^TI`V8fu~{IR{;E>ZkZ5b z-1BF?D+TRUmMQm%eI&qKl1jP2*?N?QygW>Slb}F7&6@)cYTRicjsdc+&l+gEzx4t{ zBuLyyY{OpAT#C0N6F^d}y?S(>VS;1+OL~>X%GBqqgGJwJuxtitnlNq~!kH}3shXcN zj=aGIOc%6%(zzSt6wKz+-YOMZrnJN;!=y&TO+n0Ja!j>g`-NxbRMJwgP!Y=fVZHfJ zE#{sxCJ&BtUXwK5&m2XZ?(f-=91fMhPpS|UE7*lGv;M79vHacnSXJs6}N>2#$%!NxExl;Gr3*$uDb+|{7bjX-J$JP`2D{i`j)Fwtv77NpgMvIwti z=~qvy$5hSIWLR!z+4)XDu~(M$8GopFJsKrd1!lYxYERHxsSHO1wf3e>eF zXn#KiX*xJ$@(T&g2kh9yWK8?v#WJB}Dd?&6rmOo4WhWDnlNWBtqp63W_`Uc-r1cDg>dEXKH*o3YL-57mS*VNmuuXD4O$TGmR zVzi3!m9XAOJi$mH_=ftvii6Y*#lC$*-&9Q$(Nco3?txA;-G%nFKUsw_tcX3e1B>o2JSB~n=3TxTG0p56z6z0{;t?ak{Y}%rlA>v#&OxrS zw1+S4n6m44$O%XnFhFwZyh2QLM$(0p{-j48E*g5#fXSyNaJ*GCGE-aMww?wXz67gg zRD$+U-`7mJpx=WgbVj^5cn%jS;SRf25eUBHdH%3Si5{QEmpgwh>je2Y!CxJ*LE zY4lJWFaWQV)p6ynq{{Z%`!%QRt?cK(Hz$%d1C$i&$VWG&EFi_8=&5JdWk$c8vh^q_ z0BXW4ZN=0s!PL38EbZGZj!J`?Pr{+RH`+DP$WJlNJ?Boj#2;;VqnP#q?tGax%EWS?;%dlckd+wTK)?0agpq(0WH~ufSN4#UJG>S8Amy zqryknV1pb_oxN2nS2Ze}&OfO!EJUV)>V@w3q3PbCc5H#SYwF`~7y$@0dlOYUZ;&ju z1AdKm=PCpEm&CNVv#e zlzpy!?@)Hmy{gyO7KmA9RQW+9iLoU;fm|=EfeGg#^4!LYSYl)IP9~q8c5{2xPvLSb z;ZmK4y%+tmLR^9lP!v)T;>tR9*$uBf4)g}76|lA7))%fq5cm)#4RP&splfOGR$^YR zaz8wm`Ni&?yKuULC{yncbP88+(fZWn*UnVgIbz1L;Yu|{FyycjHkwr7E$AE2?4DRJ zl=j8}Z_@#d!XH%RRG#-;d=r(W$+PToueSpaoLDnPuPzh%kiLS&v#g9k(aF0|d=!mW z(m33B7AtGqDKmTt?mD=Z^Vhg%P_EioUdhXVqyyB`-5!R`9<(1doU@|MsevWUD->4m zbO#S7NXOrp@Bp7K|3oQXB^%-=AifY8#ts~k1rHyp=6M8nFwG%KLNX{JzFKt^J=Yd!8SnAjF3L;N8u#Z^osHY z$|BYJn*;3j#&7`T5*!O2U2K!`AGS#QQ1zyfkRUP;=uo;Fm{okIUOI}07M$iW5&k3v zF(85a(X@{xq9X9m_oxKBFr?y;3|mT2BIN3m%hnR8qVv^ETWMV&JJO%GTe z1W#ofee7p>ggtPF_>!D)VCzw;={tcXFq?wQU2OLqQd_gAuzPC^Kj^VB2*R?bmB$d_DN5cq>i4s zA61(Q#f@Vnq4UgYN!f&D<1)zJ*r|w7tA?Evb z1O|GX$hEd=%+>^Z{6i~4=vOw!TPiQLvnI8Cmb=noJo(P96Acl7V!&+J zaWZ?^URRa6w{Gv2%O{5K8LVJ10}BT{YJi!i|7>SI#sBh@%!L<0sR$qNdh#*Rj;MUb z`Oq(Wu z(=mx>SsK0|BOM1T8KeJoqOCRt*hLh%<9J@_w8j!ShZ@=5P|Wl$^nEe8+I=i5hQaVn zD`@h_l`hFc>>)>)R^Q#1DwK)skOuo8F8YG+Lu5X?(WN+;YbeK|sWA{~dEFECsFque z1q$K@lUS1VFB=>6h28 zN!_7d-z#r+kTU|jda((ZZL90INDVKv3WM|%uLVLpb9JKtRJ^-^qcP2%4(lJrOn*<| zKd@Ecu^_ZecQ+6=kGa-S5%aK>xdR8k$p$G;-WpRJgQSrmt7S$GI8G*1r4$7#$Qq-$ z)6Lf8J-nfXAaWZN8*NVtLL6r-wG;+PqQ(d{nE3r2j{DjHu~u;yB=Sc`5vt??1)geY zLmSt4lAqHjh@T2Af<0zmZ~YKE5oHQyw=@h-G?C4!1#gXVr@Gx2J6uvp`!WRS^)O{ad3I3zh5QBtd#eZS8tSt*bUJ?pn= zDW;dMw!%Zu>=Uq~WBuy!Qz&<9rs?2tbfDlUvi5lnh0wm?FL!E6ltg~?-hY$GX^EzG zRkCFaNmV8bX@6t?0A@6)bK(sp;?I?%r}+Y9xin<1idE}>A+l5l$fL8*pNaphaUZSk zL=n>dZ_1!vzO8$1k?(AbuY!vHr!XK6JMrpt5t^sjRZPw~j*;_rdeX>0=o|&xU+PWu zoe7jXV#O@k7bc=dlz~OQk=GmS5t2xGosUiqz3!8{2#BUR6WPNkm){d9f75F1{z1Va zC{asrSJLTdV_Iy=>#B7r!=#VLtNvbt{(Q4^CC!JF?znJ5*yIM7s?UrWH(!eHt1RjJ 
z4T|IMK|i=I^4qES=-3ihNhj7H6`m|;aCVT?$^V~SAhl0q zuJy1ZmQ#7gKB>Qs_@|=-K$*UpynKtHD1RI)`TqO(R3epIzXx&^7yq0>(y2u4a}PQ60vLfS{wn?6<&B1ifzRt*B@y z!>GJDY8Qg$)L=o0z!oS>i$S?g(ah~aYiH7Wk0u#%y&#~}U}BH3y+z&mGnltGj~;a% z8|*49gU)Wr4ATUc)-$>6zS%N?l#G|LfE0$^=&-j+iHs6{oLZW){gT64nJ93z$j&L! z6Kxbk3$8ZqKPem2^WaBc5o^MzO0?8rC*smdF8)?cJz8lVRN-d9&m6ah;<4YW@}+#@ z&Zbm6gG`PzC`6|ve2aW`_Q8x3veoH@3~Y{UKU3t9mi@(~(S3(;3I*UVECO1w+aLNa z%;l@inflD8hqf0nxqo01d0mS^@0DanHh;SWH)vmPpD_lRk^Eb^6_Z?0a@aSWORz2O zN!z~F%1w(f^i=`>UhP7#Y@TuOHZAbSOZ=!Ptw4Sc80UrRXTxM3E#E_BBsVl>7=3oE zfh*?J6ilJJ!@QAqMpR=870pwthdu?faRg&R}VcW4vBph@2xqXu|>77E6WB z-SUp(_kVZU$K56dWb?uFs1hS#W47;@NmiP_mpvnOj%7{EZAEyi{SnjT+Q508mJ``j+7#i?rP$#@zIw$$o}e zdU#%1Sz4O#JHAd#b_P%}ouH2igliSH+4(alD2|}(NHyCS2z3SuJCBcnKmK^xct>m| z3o(?Ewn=2JbTfHy^L{5?b;ywU{C$6qQ=fx~Oz)*mkzpedi6$gFZXawd=nOqxQDVt& zr}@jiAz6_S(Nh#r*<*qiMfAU{S@oMqI2z3FgQHtkIMAtd(H^Bc;VBRp!>asE2FCT) z#7n3r!lpF)sDBBr0BrASFDp48Z?X0DE3?Yeud)~;b)Ces0I7oGM}H8lYp*;l7j#-y z&LG}Ea0BFenL??JLCwCsOB3b^&CnK=W2*>dob5@T3~1Ns-7iI(u;1dYK!RwD-?tKY z3u(vMzRGHUm``Yv@o)`j8)XP_@HWouHcT3frREW>77O(Azcu$1@xtt+Wo5iZ?$Mca zbgH}i>1m9vbk&I`m!Ye*x085PMN9cc0_@!rx_yh9-7G@a*NH6OP|!|d;;CZ-@*Q~5 zEfDC1uJ(EKXq4MsTwP8mH>aB7c--|8+;OyaQm~gE>29qJJFpg=q^1Bc~ zHhDc|DB)qW(PE2$UT{qq+~oDr&crCfT3P;f0w>SykuT;qypN=vks0Q20X5mN?BTjn zOWei4miC2)YdLR_X?0stf*+3+TteW#q~B4OJzX7&TTD!I1~Ql`7Jw$=_2n=D1= z4raeZoqzVSAd) zA=F#M%C8tX=5{2Rylq_ObaC64-@uHG28NY5L;eL!RzRu082eTNQYxWz)4p6-9 zpMZ2-jt3|T&=h6h$)nB^lxhF-?~3mYxk;f5&U{x=9s$z^+%!(#EU5<@y2_YG-9*$d zvgc_177vF=O-h699>}8dwom7UQ@nGwhZnCz~YxQ zOCnGGi(XQ~l=}V3PVPS=a~5H|1`9O1I+cU_+O#>#&T%)8`z`H$22; z{l@MrU8ooQGVDO$g&kepcKYD1^DEMznr=4e`YKPPy&nhyJ-(3{I#N}+V#-`B(G{%ft8iMjp=o2dq`cPQoPSh9qmZA zW)_qd0-IOHM>RDY$&P*J+U5&iXOjv}Y1z`lmj44V4CHrb4QR|B)RTSnWkmi~0MVN_ zt9x2=5MYm*2l@%&@mMfgdKmFd0yg1Qc7ll3i1?TUX}=uv2LqGw7eO|DsT zYLL-b-$G_UQY-N`hVv4vIF+q++xWMm+KSwl7ZmAoV~~&_*d9^huAO&8;*{_P`G%r0 z@Xk8t5|5>&TWtDOyt_9jcy_5VJfj_ITzV+bz(idTbLhIge?Dx~fWd?g*2Eb>Ct#IxPR zLqY{yx%UvTKJ^Hj9|DZV&WE^gEEJ)-RAyiNtf)%S24kikQ|P=JcX8-yC2v{N@wv`x z1Z`kQ)YaZeYD>;6?h4@Fu+Rj;jti;ZM1B{@dRXhb8I9AKfQN#2vGGnqnunJ0)xG_y z)Ok2Bg&h?!%Fv8u5TR~p=RLZv#KWx=Kml^frotfumL>1@!nD&#@tl&wbQ_=sIZ{M zbc^TEEBQ~AWUl)Z451B71~=6Qg^5P5buT1m8Lr;mcMfa^-~r6oM$hcx!Eljo|XS zN(|J>u+<$ayX&tMI&vARD^*`+XW>>Y2(X<=Sn)uM6aZ1lHU6qrqn=lSzs8?wC{HK6 z*P^+OGK$Epzb56!8wto=+_w*&f&ehDqh&FkcQUE` z+EIIz@G+u$a~99hqb^kdZFTI)wWrO);>e4XqxB+wXN;|$5?J$stAut+l8+3KZh*|z zkN}aed^1@cd>J>gZw_9d`W}=T!plNdhCW7kwaylCIWyVK88URlpc5e%#p2$V&bdhV z^jsJUrBz{;jYY#pyEJ8c7A#*t3f2n7VNllR(cF7*rqJig|~ z7LI?%tceZFG_enq)m3fXFV$Rz7k@@kZrpgIdf+3hX*714VlP(J4IU_u2o5YQ@p zaNce~c&h?lSYo_tE=yg0?x#4&L<%H!VG${xMCGLOKHWu0>4su4Aa@~_=cLj0A_A}~ z{kF5LiV2ohppj;gq54!su5#EQdMp|W6X#|Ssehjb36rDVRw-6MClmsQL{v?GcX~cIXB=tKBk&G&^1; z1*l#6kqp z>BsXAD)H8`IN3y8oj9g4g^K2tWdKq#eruzUB`*|)W^JOSP^{FvN#wep=@ukzaj-Oj ztD!ilr;Q(_2zpITm!t%7Y>XWYj+Y=}|MnI!yJZArVi{USig%0X&eu-S&torYrTl#c znTg0`&HAe!mW-d^Ir-3@G|gz94)hSOGC%3sDss&AJQ*wHd|qVKYtal~x?FY=?V_(F zE$(EK9w4ZV9F;|kmfL;Jj|1pB-tnRDgL&j>+!fJ8xG5-bUA9uf^MoiV{^l+7tIr_i z97YMziW58+Lne2)!)9b)UsK?zPG?2Zz3CdEJA#6s87HA-KoGj>b2VtlK&iA=+Ane! 
zihIy}9;>J?w3v8`^hn=RdzAr9wUxOxaP2b{@84Ct?%HAa%YX>j0DwD1Cl8jj?b@Y< zM}@QdUKhn>eiX4BW6BPY)iM?B@JvGwoldG|Vz>jc5S@dhsw8|ytk!l*14m!w zd`h715P8n?Mt5~1P~yi$ha8%C%s=fj5W~Yq8E+*&o%>VmY7!8~qB_lck7av0Q#@S1 z0YC}DsZtA{oMMtnMCy|-Ynfn^Kg;Wy0RlFd1Wq`k=Zh7H0VyCi$~pf2<@w?-?JE?; z>FS3jYDM|6ehZi5?y7|pkW((X27rOrbjh8ZI2^Ru=!cFqa;1Xh2cmQ!Tyx zsBh_yocxbdRQhHXVp*(ol)#rb%q!hJGuim44G zETq5$#iof;6gkso=w9#AjTF;}gmte=Y19^N1d(sq$?D~$K15XyyliIxo?dz3Q1Nye zk#8xVa#gku7frqRusXNlY1&g`!wxY1{n_@3Gtuy#{B|?OxAHLtFXq$Y?~G(u|2>L0 zda!UzL|`h+)!#N;_*(Z#RtbW(`%4+PvgGu@4wZcmUZd^4pO`op?tYXaCgG1)m_&-5 z(mcvyK-*TDAtO~Fp~ffBcT(k##=Q^U0+mMqry}31sej3J6`OMTX>C)cl}&z2-`CTS zuFdaKYEswtz%b+=d7UE3eo~JR9hhRwLYrcE^C#DaEFL|rJMh>7(eq5{MOm54hO**H z3$)P-(QM6PU!#EN*)xzYXLD(}M=F2C#W=r0&e%W(W}e1}{2mm8!k3%)K4ii!BdO6t zkl`|~e!ZfejTN&{ct)WOs1s7Xa6TjBv7})G&i1Br6VU6Ap{bK2b80RA&Gf_Tde_JZ zr%*fu5FiD&uN;Tgb+iDg|5j3kcB*B>wX#!MEgk<&ln@)d3GXlUeR)r_wrm|*HnFpc z0l?!^SsL6-7bQlAsg%H$4Sk^^#!}^$`DDEpiPxSq1*!{-Cf_0>1F9d7RugrIYE;vw z)2&a=u>l_qg)N;UBIHM@ib?P5>QM-Lc4!A(pi2&2D8p38AEEhMfN+4idKFh0MS|#; zC-G+w7kOc5n#W5^ZTJnccP}nFFN;QDcLhnsq0-(o_Z~ z*^M;G@SZrt6>g>qMP_b(39cOk80?abY=VEd&A`2}osZ9GXUtKwQhwU6($>%>|Dba5 zKUs)RomZ!(TCjxnrk=k;MAm;0%e>F@pDMa?O8r0Rhi%EH{-unn)xRI2e%L}$uUiSL z_IRC_BRzlPEYII==Qv=#sTc4Mnj|AEJ6~HwJ=5Dh{|~AwPq__oT2#^4RsF#D`J*lL z==-weVK;l!+HH^lNs7d*7OtPW8422hlVlk9#HB^?sgDQ8|NB!{G$K0cSvb4in}5)6 zImq&-1BxKrBK~8u@K0V=Bqj^A7OrsL1gw1U&(sLrjz!zee~`A`Z#mbW!3PJ=VvH+* zoCf?a>QH`8`A@0tOJluXd}>A$o8y-~X5?N3|(ARYy-FDUTciEQU@e-l)GdzVv573KH)Yq=$Y z@ARW8U$O819V{qmUWsT`NhxVc1HZ*O1x{&=YEaz+Kf11nS)fBd=x<@6H| zN^%CYaSo+x;44(ZZNFnWHfKL=DGWdISI0~vmfr#Gm&5fBGXDpG5-+ynO6##3t<(KIdlmnn*J7HiGiZP9 zCx3zJKJ=J=?@R1-|37GL;`VC^y3Kkl>Thn;-e-7CC~rN_pK_v38^Aq&d#m##w(XY3 zl|NrU?)C`&xda~w_0Hog^jV4$$eP}%oPM1fad%J=4?hmx6#LU$yOz+la72Pbes9gj z`KM3u=|^F0zYaqGHbCj2Ug4xySLj)niT|%qwYV zzn{WZuCTf_q`TOudu{Ggo7~4eaKxVwS@q%%HzMCZOIA5~^B<)7xPX({|A_P4Op5%I zH6lmL_u}S2`o5sx`PrKv-{omzFDGO;6PPtr66;21uts_e-A*NKMbcRO#Za_ z_GUoj+`(Om4l#SZN2u4it%*@hn3KhJ+K&r?0AuJMbi4Z-9DB`Xg)O!4Pk8l9ZabvN zYh%CJT_6@1`b3`f(=BWTXJ|A+n#=X{I5$ej5*+69DWvJEPaBpScYWIa?@)+NUSGXi z{2ssk-${pIUJ84Q`?s+U6S=Pax!mza@L|*!!*IpfIdJEnqsT?Qme1ArUwiKz)#TQ# zi@ykhioixd=}|#IDN1jFMBOwM0UII+Dri6iq<4ZSAYEi*p#=p6RGO5~BZ3r_&_QZ| z0HJq6AR*ip_Br3t`|Ul({r%1v_nv#l_YVU{PLRVMvCee4h?lCc!pP zjOXpNVh_HT_y>}@#!3PNdj^1O;L?33)k*`v| z#XHZRyv#5$H*G;L{o7TV^^4$?(#9%WkmbH&!^arqI`$dTbXVYEQIXsH=9!E0oJQl< zQhM2E_B=oH$a4t?_7Q=i=em}&oPsN#-sng>&E!mqp@Qx_1r-?d$l%g!af83;UPGvb+g z$r0b9R$yIKseIw_s$wZ4aP+l!x9n2U`v|A1 zOq&bz`Xo(IX~1?|?R1{Ax^6*!jBno||N0=?T3}p1*!7mNN>`A`(xDB#_SMuKT72#5 z{B}Tc~WDi~Qsm<57@RFq-im zZv?ej;Y(hh)*19see2nsTado6@D}ubdZYb($k~VFv=~G1DiE5i>B51(L_P}s=B-?C zkvtjmBCZ)&M0YCN7Icd~Z->!RXINF1+zPI+X>W+b>w~%~phT-LTiBxiLx>CC{T&X( z^>(4(WvEr{%~ZxS59l1Lut=`YV*J^gM4f%sn_?`Ue_G~5#TcsB$U+i~HE}aUHN>tO zvKHMoc44KrdT zE~QAZp*bLqD_8F-`U9HqPx3~@oKx!dFHNh9s$;v()QI1iN~!$`zL@S-nIiajswgv zTeTULl})>gdQ!U%2>Sp_2YXO1s#u-Z!k>fhI}xFFkW*D+CZJF*ntrQbz!CCq6$RI*?R>7QunSr8DMD<{ z9hR)G)b`HfJF_+P2OeybAmVAo!%}5Zmq>>+=taDP!e*RfCA&7y?xKS8{&%j!@L`up zjZoyw^hW2!;Ucv!c&^;%U}|?`PRs77#p)odsV|$SjX~JcSZA^1|_{6)?SY0 z!8mmPrbcH-E70!H-MTZ#l{O^+mlJ1J&8~k9RuQt=>tMm&Obcb0 zo)w2i{Fiyqcf*u1iz2X40gD1lN7?GlF5rM}EL17d6AUD?Vi?YWX`xy!^0@vn7lh~f z+WY4k>Lphnl+@KV_fK!MupCXTa4L0DuTzgfbUiiVdUi`^a5uktj5=`qu@t8}GXo^; zSRyQ22e^)kG$v({cbeXeh^6JHdLmMKO0$xdguaFt=7+Y^oz!!)|)MRsuTZ;P7 z=V4uG&ud&8T-=!=S5DZVCoTa8rt1hImxA^auEne^DDTK8n1d*(kpxO-R72Bms9L$L zM8LY1`Gmb2R6X15TxC<7Ww^v-4P9XA;s_5VTG7tM?DJ% zc{eb~h+&DV@xXx`JHZv-4xmrgfzKqTX76WpySa%7z$$}r2kzZ7NQ^|k1JgD5+fI*& zey)wN?Uf1*%H#E~xaku>!fftIN@3SInAlb;ynJBWuF<<$`Kfk)MkAn75AU%~?u~_8 
zKXwN9{ToB@-&9V}Tm9>C&)XLU47FLm?Qj?S#4KtAB0^OrfMt*Pe5Y780;T~X%sRAi zGim^1Vy7DEdcdeFaG%po?~m`dAX~m(7d8+edoEabN#b3mGw(@e%(=#Zh|gcOQ##OH zCfaKWpjcP0gHJrBbbuo{hX$DT=7OBd`PZu4k?R1|YtnzaSxs*_7(D``e+@i9|1G4R z_UOSzHF)U7zUr5K5wzbV317uNq3kVa zj4{_?6|7(Y4M=Nx2_p?4P>^i=9eF34H!$?CNpRL5hgMI0+Jf4J(KF!mrk{2Lle?}l zVlo;{zH3TBS3vHHF}Cu5P7ieIiSqdhbS41s{uf}OJs<+vJ&4%=00$sdOz2Qtlo6DY z0(|Ig!!0P-H%LB<_$LmBo>17uV7+2=8hC^D1sW>qER0;k0M@KAp{XkKvmh)6Fs6mV zgc0augROu_*<;KiIb1Cp_dae5a%5m%Daw7OE$GRszDWkygp`FqOS?y$uSr8 zYTiOzv*_H76C_}1sce|gbRu2iMg8UpfZMW+YGxZQ7a^Xvn1hghCzVyTCbbq_+n%xg zX(l?c7d~Sb}={95p^}KgSdBK@8bqs#0luiD#UBdr1P&I{~zYSoA9N1 z{SinaoHn19XlZ{YU9618e?_QxD;B9TCG9Yhj1FvJDSI1>PCtLRL)PX&%z%tSc6%Bp zmq}as@ld)94s$8Spz7(_qC{YI?0+3h6o>*@8@jAAhmh>4_3K!IhaSvIQA-I-$oZ$2u@( zB6$eKK+saU{GAudtnDC=)l~!G>jV1kmJgq%6Eml~%8rL5%(UEns2}(JnKWkA_l@Sw zUXkbngoRWtlfT{bxdI?k&DUa9_5rWYy#*~QF-@j$;z~Pc@^;?7l8NxdHSHF~&>L#P zZaa!I|1ays&ZHL1GmK&-sd5k7}W(=_gy*GqC6|C2_r_?+A zhcfn7`eco|Y2BTLbex?zK5!_1Z>`z4Ns0~1oNZJN28X^0AzRP{Seqvxrya3nzSC^{ z5l0|PH^`}2d&VF1FP%(so4DKb=XU#FdxYuP&KYH|y=DF#u9?*8^8!Ar7aJygI(s2J zc-MzITTp6cD4ivP(h9zvHDU`o>B~jiZ3Glu`aX^W^dK^fvJXQvm7P0O_TWx&g8FNx zvchk1&=lGO(L5A@I(bQENPEeV<0nb2!H9#%D7SN#WhDEwLAe`=!m z_`cMtg_$dmZG~S4J+Ql8T&4b$v+9y{d{qb(;=%a5OKFm~>6pT0axvmfqz*(J5AR-=0Qu)^_??+`Cjr4DqZ zqC@6`ihuDEyZ?IMcmt1z22sl=JeqK|T`uFQLy?fd8>g~Z{D*oRBnXxN>?{BO{2|{N z;?;Ipl&zz?kp69}il^t_)!+_-mO_6uB{owj60L<{wW{5V^~c?euRnh^Dy251v6?wU zyPj(4v{a;j;Ev*W+yN`YAI-x^p-aDd@}K=wDIt`wq;VD9XSBJ$1MFLwRs`0~%;B}Z zc4vK@Oxm2^r((|C0?fjuk6YMY`&n#3FN`*e0a%hwrU2$k>`mxq;TF{DfD~=gfxJy? zH%(`K+U)4(oAXF(i3K>HSCi55l+Ku7mHpiZu+N{?m-${#IXfrjI5b=`0a^la=8kaD z!aM;j31~H4S+Cj3+OC2RUF`;Y->+G!9IL^y7_+s+fA@;3BMb7*-_37y^?EU|kM^## zdhGFv`Vh>|m`!X8V%NO31!2s&sN^k(cG;2QG$O9L{3ho;ugAw?XN?kWPxu_-I8q}i zt~XFKd<d^|^%W7GC3oA4~jbw|iF!CL4&f5%^ z5~u2J4&IH6DtC_Lp0a;w6t?jq$mB?T+8O7uk8^vfN(Izuby^cU>YguZ-&7aO{=m`yWdV36W^Et)h#zusAecEA6~+9kGOXhl8Ov3-8FNKqEJr)bk;E`3=nB1Vq&kFxqtY$CuLOTWfaP-vBB1qSZI2ST$e zBI1RK&oB78w)Y0ACv$=BMY%dnAnU5+;mM~Qbc7;_;^E|3ZF=9ukI_qCjZ}6k7Lj9< z_HnVLq_*r#U|@RqX<%-piy~9N+>G^rxjh^kHt_kD!Bv36_Z-sC?#pIz|I zGf2)Yj2wF61z%+iR49t!pXgOuHt%l|v-lF#QZFEj=VBT55nj#JTnvpE?rOu3x-l!CIJ4z3`NDnFby8@eVBx)g*)6=NDa$&qkq`8My{@#y1HN2~C zOf0r~!qsvhgV1-^!(`CRV7ywwqpD3Y=;Y8cxXYm6-y%QXS1*vZJ=fs)gWF85XAbL- zcyQhK3~B7(!zcQwT&gW_P4tMspeA3W%++P_*oCYlq89$X53{MC(GZouMXdTmi?+X(XcY7?xsXTj(Sae-$4`tLD+D%W9b5V$6w zbHCL0=bmwH3I@w6Y#!DVh%@=Q6mSDkO?$fMHvu!(u|}aWD}KCYq>cNIKuuhzs92(L zkU7Vxag}E>-V%BeX09d0E&=y$xJ^ixfQAYQ!SNx@#e*lEaGHKSI~?mRP`M^LDX(s$ zaWke*Ebi6KA45`I+tnspz*Zs!0J@;zIeJ_CSh>~07H8+9L(f%@Fe-ZL0rSfIoNM{P zON8rMC%~#aAawwVZ^I%;9WF5y49)Mh_d$it9zTAsB2$b|O6ySjdP0~vCEe7D$i*!g zT8L9ni5xsjf340PUbSZquWqP!U83@L2!IG7w$TZ23Au>l9Ea(LZtzmh?moUV1<#y>)JXmCCMUyi?Zt zVyJH0IIh%8P%6Jfm&s9I*e2%MLenWaNCclQk&RTm!-a1#md#t zd(jhh@2t32dkVAVq|Ke=k5u4$h<~K%6=o4dT$9{+nMid0g#qAZuSq2`+-&;|;AUsn zU3NF}kGxs53LanRSO}A|E(!M^G?zNoZ>=s;wU(92Sw2h(Pp<^puH_r&DraClGkq_| zsNefujNXUp&&>ZyV8G``NVL!BSuDNLVjk=x5gEolint4r@2-O?0E8&o4t;CFst*Ac zi5@$?Ntl<@o)?jCE2x5@8zuoE=YUu-H)bmUE1|f^2GUcYhJ)by8f27@(98S5J$wkN z7Uby@yjlG6_K~rfluO)1*I7w{gPd$y`X8Jq=V_`SLpe@+APLlSq@ebpPISJKdDQ8~ zWm+<>d;qI{Lt|*8JH*1m&|TBeRVnTa!F%-9cV8Dn$mce$hC}{_iL0-tIIgyJRtJGR zSx>UGLW&kRk4Udq2^C|7^mYzUSf9(>oEv0D6?vyby*2zW_2h@SlD$l6uZ{R|DCIua z8wjA#h4&w-zF?`k=puBp;X{XL=tl0=`=*;nF@u3*xuvQ||~e z57kMq&%WL96TL~jDp?%c_$v7|^32jM5;(lT_SWxg$8gcYVqUC`!XOZ;jQx?)>htsvj#fm9n0~RRCi+Tis-^es?An6VRUVexovK(B z-Y`v`1o9R29=g{O-Q!Il`#DWGM=o~rGI?II;(EcYOfr!+Er`g$?%-Du=Ra_0-eRsBd;wcK(i~j5YM#}0?O=}GcPuO zbozD8ce60{z0&eEwHk))0eDrBj9Y0g+)P8!KK5W8=Up+CNCCpa2REzr4FF+rVZae~ 
z#~ZVuhn;pzCNzg;JI$fW&2A5r=uRC75sS=P$ywHIKh#=Sa!)ovH(X{`;Wq{nWbO)- zYenw#+k!H!`lmcUT3R*&8cP+B2Z7L#Gw55i!f|}~(cvBL<~@QJTOIQFeAaHb_T|-U z**{+7jNc5)wy>jnVn$~-#|KKkK5Uol3e-}p=_UNohM7;5#Wgz@c#!dH_r6|oG1AiW zS+b7^%gM0Ix%1S{d>$TY7UO7;%B2rt^7Y$fSzNE;=6kx&Jt0u_;hJkiuH1Yd*rn81 z=y1WF{aMtT2%$)u_)+FMUAbuUhm`QtTV8fiV+~tTC70m2%XG z_O~yfIH4V!hacY?iE|^DSB&=R4&c9dPk60>SeE-*3Iua&v_lppWo{d_sW|wjqjse- zprKF9gY5C5g{VJNPc-g*k8{y~bJcIgn>A21Gj&IvpR2QS?jt8aZ|L8?p2$cjNha02 zOk;zbdBq(xBRiBYERiK~O=3jx(Jgi1)g6^pf$-1~o!};1*m%V=t-R|FOIG(ai(iKj zGf7?Lh>$gMwKW}S!fify~hTniiYEt%j$urq6Ae=g=i3`K7>cZ0=h3mRhJLElQqD>O3T z3!rSY2xO;NvQ-?aXR%;T&N*6Ryo~A_mL0(PQ+m8c848C+;UYH~Q`3egBUE;)&KcIC zcGbG5Pwn*e2bi!nf;|z<)8sAVd!wvxv}(xHL5HK>q&eFI%`rcl1DEGWy5N}7?U|IQ z-&gu0SBp#Z52mzCud}~*%-d8{eq2Vy31OO?LCAuLseEIW@_EJWa3fFDZCE^uugcpI z=RYoZ!&fB&wN!`KXki{OGSCZjl?{lAVcC9SzRHDdr;p9_F6Li>X+-Zz<&s-)4=&Ca z@ja{ggZ4w5P^!wEB*xu9%H*3}VH!p3laX_v87YtE&jw99J*7SvDB@KW(bF#Kp{e4E zOBQNXoDBebFzDhS=196#u>$uoW;6YFAv5n6vb-}XKWg&^dXNX2%sH~ z8QE-_JjbZXY|6A}5;bg~=*MvpHqrPrq7ZwsQOyAl4-=4B7Tk4YR&$q(W02)vD@4!^ zZ@p65K+Hb2YYjA;AxeF^uRf*Yf#!9i0$qHjIo}Mplcs?&T`NZqYX;NYRG=?9-A7Pc#8%^3*ygLvHx8rTgO<(7=4t~ek2b=gndxN8 z!!79Wbs$(303s#=DU1lL(78E0p?vgs!l|}(OAF_>W{c%&GxD(=;UHiPqik`@(*TzR zfTapg<$Q01EAE&{tN0k#6m)JDY%7+B)uc>iJY~A(K$RFEH{Jvx3g!pK^92U5{GYT# z0=afjZmrc|K+2zIK#ofEdn+mAiBf0oFj+iI?Gt{H5_FRKckFJTY@6kmP#}01L`RV}n_!&>-c>V*=AlW`6)+5k; zu+5a>0CB56bIL7yb(=md;D1!&%I&ipeK@sjnL(3kC#4j*!UQ!N+t-9~jjUrYb(tlL z?SK4ASpO|fT4~;IkN$B@1}hJT-6b}LUtFW#5Mn-<#GE2sYs=iKErn`p`LTA;h(RKRy`GL8BGwIrPY}&wUN@=6Kk6 zI8eNMPNJJG^fupB>B-m9fxUaL97XsX7{8cA6cAp`G5ys2TQ8;TtNqKgkRycB(Hrly zb%o=QqMcTnEx)cvuH54Jmz+@}Lhsq{#J-MjY(KQK)Y$tI7>?x@WQyxZ4U(SDlG{)4 zd}gD5vpmN@G;6oR)nu)Q5=>z}d$(DOar%7|GM)59|`&0wM*-eGEJ^kA$7<>p$(jqsc+Z^>>0h@y1u*`#Sei zjxKg(q@(ai6p+UXIP4P zB7D(G7(8ucx+nW>v~A)Y22sAwdtV0kgG zXWMZ`r^r_s`kBg(`6<(Itx}_Zq4zq?I60|)?HTp&tbdMO#QS#IYi>c@KPGYE^)-Rl%0Xxh7)(E*>u+^nA?lLA(qyW!T0IrFsvk{=gHJ@h`_ zCI2)J!qJm;{U~*@4r5kTn#}A0x<8;i=X=qMm(ZW_t!NTRHt3rf3_hCemya(O3^()i zc*V@Zw%;D>nE-C|caG!%7p&ebjp6sI64}8e@uOau>ffPuJ7x}(v}0ET+?eh8gwK(= zav#|bahsD@MZ&7ZoFSsxLDP4?`m$6m&0ll{%O+s3i>~yW2r`JfHfcZEahT<=nP#9C z&(9zP_8WTiELenM^oG1GGoJoao@!cP%$7|14=qEAqPGRfJ5jAc)d^5J{+(gD)!!q)db$0%Ub2jU3D)cRpS*{C5IYUT>PhrCpgoq`^;Sog*4 zB^E*6oES2pxPCK{wD~?6gTm88)#|krueg8PyhdJ$x?}Ji;2oK<>xl|%IzxK{H8^dV z+q8w{BFik&&OJ4d^P-;B59VL$Q&70%oDw>H-T#+bhhGdtx_Nd^q9}PgkFY8Ocvb|+ zr0oBcKj!{UK@acFM#Wt1U;(jm>8AP>n76r4?1~s@>-w{TkLl|-wATR9;K^5=Fz)4>qYVAfZrgE7rhK63O)v$UlSapC+KU?^GluctKPL`!5wjV)K}4bK5izTCNxZMFSOU> zv`0CP+2|Ni$j7FOmMg8qa$%uFq-dD46GsOJ$9H+4zmCS9K04o^JW?eI$fwZbSB{!D z1sSa>-r!P%9M@g+t3kI9>7MD0qNuo@-YK3h%+{l(b%U-;DeoIe{LMZ}TKm?nSMKjJ zps#%3tR0Fi33N{SO^Ue(m*++Riy2dZPDMA%&Th^QxTaLB<_dHxodqm(pJfL1W2Po7 z@O&E*ufUYLW7(=R_rp{Cv~kMS0|+r~Pgl%~RYyGmyyj&B2j%=0WR<+9UVJ}FGsjFJ zEf=Xj&`>V|fJc!jhIXM%R9|*g+WzY2FXgj}QpWX~e2N2=nj?YmxxqpEtS%v2n}jRW zEWu~igh1)R6)L-f!dFdADO#o&zN7!Vf&o0mCk`oKd17J8EA;FV@#>dRt10o=AIEHR zW{5e+6=foBvh4Ca4O>taGcA;(H?F`njXdCP#AB~fu|sHjT|A;pYFBp467Sv>5g${X zh4xbk{>FEgykm77=(%os_xc`Y)%b#Q_1(YvzuaHl-?psEN7PnpLFX&KU6QSc*qu*C zV?6G^FVB+9nn&_x(+ofKPTFUfTQzcg-0Y0pc@sa!&oSUXT|hB)R_@rS?N&5U${wBb z<}ZL%sY?W^V;&=xG^`kf?wk9ITK#}}`U1O|4+Px_=2qzjKj&dHCHtXn$HP`OiGD!}t(8u4<(#99e?H-GiXl>~)HQ^lB@uG>{dutfOF-o!vW z)1P@%A5H5G`j&0+20EFvu8H%;I=a-ZG_K}KEJ{9_U)kMX=wft7zIg279v^ooxgyE6 zn{ml&_o}^+WI<7fWH@VmLvB6Ug4GW$CxK(6E>-*6 z4|VySw!i7pCUaq~-}<2H9wPy^wn}^;+_YY00ibxYkgavdk)9$k<9FIRaHi{oP`5Xp6d6jy9GQK~b5%^L-`(ah0>ewc&APpBacB!2ViYdq!ld6pWSTGIBWEp7u&U+Hbd z%e17K^&aOW)GN`TKt zO+%M4E+?`BHkM?K#y2IQL=3>8+&CiFftu8lqka72IH$7Ez4@E%%Yi2a*us~!j-LbU 
z(>va%^n@#0keRdmoy$W+*BivSA!D|DVCK()ZT^Hw9P^C;CZPllUh(`nlY^FV|53Ej z394wvtaw(filz-R)-|}1B`~8iR8F*ax#rqC(HBHml$+IFrQDCUxtZR5^nsXx*}A4k(43Yb zyeI#ga%~#9?eN+m^Lh91C||U zZW>I+PFjxrQ^A!I)2QW%MZaDe-7x>D&`xTmOSa?yLXG8NkXrUUGevA}>e_eFs>_90 z^d*eES-_)P08fL4-`<&KSMO0BM)y;fu}x1TX58z1R(Z{)_M1}aU%YqLeRo_+=fY}j zV==Q2*H3>blX&TSBzbl|9`4njaxuwfhdQ&!u2c?@zm%7|RR4YjClf*=+zb`SXEpl> z$GjFx;TdWTEH11X+(1Qldrez(AMhY2dv%xa#> zH+n!wzWnPG?fw^KnYCfrM|mD-?K1VI4$F2bNhK}Y)|*% z6>&kR72CS<&8*AbL8DE;bgtt5vAo&p9XTzm9=WNLWrTP@$C1|6dzL>E)tXOf?L__R z@F2wB`w)#|Z{FGM7p&MJd#Vc*5VqCqt<*R%>Lwz^tU;)@6e&)=9dj=7W#F!LVzD7z zJ41T;LH**C?yF0lLwlfSuS>8YQ>-E5{GfnuN&nR!Yome?P%u)e3v+&+vv|? zss7kmbkBWn%s_QdJIr;Cp0a6wWpGgF>i+F$1`uAb2_aBwM`c|MxKqE~VzO&nJbb_q zeE5!}U~b>U&0M)}J`d#U6N&llYSSHzN)YD#%bzuNjyJ2Gec*u-HI{c@?Te% zt~!C#Y(I6e{A;Adj2m79c=|ix5Mb>8W+t;MD^&7iR9fmmmE#5FJ!v?|n8h=}Kc3!P z+*9pbpy~IcX13V^m2pz%uipht(Oa?FV7ZDOQa}eAIHyfMzgXC<;hx@x9rQkU#k%jQ z4rK6GucdtFzgeL^bG34mf`ibdgwJDXcDL$?0!O@|#V1$N)sn7z6$S3QQpSIyCndll{Mon6QETxnPGvpj@}AqS5V z!9_gdK6M!?1@8>eF8Lom67NU;RspFsYRAuLbLN${!{r$oQ9RstO=8ITInapQ4hO0L zWhoq5Ftk3GsM{%#S8CgPHNT|Jz!C|!;kvdGfe77xw;p(=SRJYNyaG~~(&gIEtte;9 zhv1_4HELT?`FwzZ(BqRyHV=@3h=pW|6FtakeTNZUqS%c}bvXZ3^BcH}PaNOft9xG` zE(#hMi3)wNi?i)b?>ZnWbauZgf1`8*TjYaj%UIOnaf7FgV`V{JlIH@(bT`)#pZhu4 zB-KC_lK0>%Pu~%(yJs;4>(5R3H#^IClHaj0$FBAnRuK-8>UcOx!pC(v#?0?fDu}1wnw&7YldoE%6EEX4)#%<=f z%*<0VFU6{s%4u4yEfreh+8>cACQv(PzN#cJ7M`9QPa62=Coc-Hrs_( zL?HqM<^bJnSZilTjmW1OT-gRh9DmKbPsM_l+LZH~Q?8xZZ&YR~upY8Y8xsZT zE!3Q$S{81ECKt@8Z4UO&59Vl)A_J8CjDRSREwL>Mpc$gTG0tsK;3~y?By$EgVwz=f zFAy98FIVZ*uUb|aSA##5?<(~39m$p;>Xf$4I%>DW%L8nyY%CP8vMq~~7*Iv5xG*(n z2fuEobibih2lw6m!exa>R-=2}`NnJliw9126yat7J>KVpXk1pMO_kvL-Njm37p6~k z6dKKP%}P6o;g`sMQ$2MvE8tpRHq_O!&_{)a=gDWk^&Mwa{m{AvK0)_EhhQWhkl=pG zEWDpb-jgkC<{9Ye(HlD{+LTq2belKD2z0sSY_1;M*Q$)zd(G0a$Ge~r=enN5I7GoF z0)5C0zhQnIFW`)$WCV{+a<=kraTOP$eL?>(8aDw=!ZqWR|?3E26pCv?1%~La>7z0 z#b3JnSM}@vv6ElSlAt;)^zjW)9ENN~f#R?l3Fjma^-ohAE{KJ9ThNr^7W9;`nGAuF z^6LG9ypwKi2CiN&`=tHel{O5YTu~dv+%q*o@`ECgM@mfrZV98HZauk~wbM?6>%|k- zNN0;?+A-V)I4?bl-GWd^Aq`ldQnKQ{>&N>@B4buh+$WMXMAs){!>1;;PsRKfy^p}) zp*CFcig^?QzXYGfRrHnf-|(^VG1VNNhtRj|uWW>pF__W0O+qXd+#)!qUC7h}R4QKR zisxgu+}XwI`rrFpeCo8eilu3`}uc};s4_U_Z~)qR@>^p zNYI*>JVAoiKNiNQXyLY}i{vft-7T`zASko$qD!&CD@NC=iJZ*Qqw`s9>Y|s-fvW+P zG#r$}HK&s05)9;=crUZfPP8rmXAw{Y%Hyz%Z>1|g`cr(Z;1If7N3-1C$(-j_`6!wROA+WWjmyZ~>!$lc}7CmxtZsW~lqq;A;4JWtNvK zbt*($_I@<`hd6YfyTCYJWnEvaKF773aR37QPq-{b)#PYiqRMbwWXehIv%ccyjy zTlsRW&10JA5W&RrugA%4YPhhp{u%AS2N-;y7W3&=nL_D94mOziGoPFoh=N(IaYv!; zpU1&+5sO9-+_g}(s;1=jUYQHCz2Y7mpC90PHvqvm>;Pb6O;_T^i zgF=~t+sZ7p@o>mR!V{q4S5U6oP_@5gHghVLH=UI5myar$pEl{E$p)h?y1r`^x2nzm z%pKG6)Oj6v={J-61%GFi)$UT`-5g1i8Su#;U1c2M0+{LN!#w}`=f7s-Ul#Fy<*q<) G_52Uk5rWeI literal 0 HcmV?d00001 diff --git a/research/object_detection/test_images/snapshot_serengeti/context_rcnn_demo_metadata.json b/research/object_detection/test_images/snapshot_serengeti/context_rcnn_demo_metadata.json new file mode 100644 index 000000000..110793e2f --- /dev/null +++ b/research/object_detection/test_images/snapshot_serengeti/context_rcnn_demo_metadata.json @@ -0,0 +1 @@ +{"images": [{"file_name": "models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0038.jpeg", "frame_num": 0, "seq_num_frames": 2, "id": "S1/E03/E03_R3/S1_E03_R3_PICT0038", "height": 1536, "season": "S1", "date_captured": "2010-08-07 01:04:14", "width": 2048, "seq_id": "ASG0003041", "location": "E03"}, {"file_name": "models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0039.jpeg", "frame_num": 1, "seq_num_frames": 2, "id": 
"S1/E03/E03_R3/S1_E03_R3_PICT0039", "height": 1536, "season": "S1", "date_captured": "2010-08-07 01:04:14", "width": 2048, "seq_id": "ASG0003041", "location": "E03"}, {"file_name": "models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0040.jpeg", "frame_num": 0, "seq_num_frames": 2, "id": "S1/E03/E03_R3/S1_E03_R3_PICT0040", "height": 1536, "season": "S1", "date_captured": "2010-08-07 02:53:46", "width": 2048, "seq_id": "ASG0003042", "location": "E03"}, {"file_name": "models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0041.jpeg", "frame_num": 1, "seq_num_frames": 2, "id": "S1/E03/E03_R3/S1_E03_R3_PICT0041", "height": 1536, "season": "S1", "date_captured": "2010-08-07 02:53:46", "width": 2048, "seq_id": "ASG0003042", "location": "E03"}], "categories": [{"name": "empty", "id": 0}, {"name": "human", "id": 1}, {"name": "gazelleGrants", "id": 2}, {"name": "reedbuck", "id": 3}, {"name": "dikDik", "id": 4}, {"name": "zebra", "id": 5}, {"name": "porcupine", "id": 6}, {"name": "gazelleThomsons", "id": 7}, {"name": "hyenaSpotted", "id": 8}, {"name": "warthog", "id": 9}, {"name": "impala", "id": 10}, {"name": "elephant", "id": 11}, {"name": "giraffe", "id": 12}, {"name": "mongoose", "id": 13}, {"name": "buffalo", "id": 14}, {"name": "hartebeest", "id": 15}, {"name": "guineaFowl", "id": 16}, {"name": "wildebeest", "id": 17}, {"name": "leopard", "id": 18}, {"name": "ostrich", "id": 19}, {"name": "lionFemale", "id": 20}, {"name": "koriBustard", "id": 21}, {"name": "otherBird", "id": 22}, {"name": "batEaredFox", "id": 23}, {"name": "bushbuck", "id": 24}, {"name": "jackal", "id": 25}, {"name": "cheetah", "id": 26}, {"name": "eland", "id": 27}, {"name": "aardwolf", "id": 28}, {"name": "hippopotamus", "id": 29}, {"name": "hyenaStriped", "id": 30}, {"name": "aardvark", "id": 31}, {"name": "hare", "id": 32}, {"name": "baboon", "id": 33}, {"name": "vervetMonkey", "id": 34}, {"name": "waterbuck", "id": 35}, {"name": "secretaryBird", "id": 36}, {"name": "serval", "id": 37}, {"name": "lionMale", "id": 38}, {"name": "topi", "id": 39}, {"name": "honeyBadger", "id": 40}, {"name": "rodents", "id": 41}, {"name": "wildcat", "id": 42}, {"name": "civet", "id": 43}, {"name": "genet", "id": 44}, {"name": "caracal", "id": 45}, {"name": "rhinoceros", "id": 46}, {"name": "reptiles", "id": 47}, {"name": "zorilla", "id": 48}], "annotations": [{"category_id": 29, "image_id": "S1/E03/E03_R3/S1_E03_R3_PICT0038", "bbox": [614.9233639240294, 476.2385201454182, 685.5741333961523, 374.18740868568574], "id": "0154T1541168895361"}, {"category_id": 29, "image_id": "S1/E03/E03_R3/S1_E03_R3_PICT0039", "bbox": [382.03749418258434, 471.005129814144, 756.2249028682752, 397.73766517639683], "id": "Lxtry1541168934504"}, {"category_id": 29, "image_id": "S1/E03/E03_R3/S1_E03_R3_PICT0040", "bbox": [786.9475708007834, 461.0229187011687, 749.0524291992166, 385.0301413536], "id": "Xmyih1541168739115"}, {"category_id": 29, "image_id": "S1/E03/E03_R3/S1_E03_R3_PICT0041", "bbox": [573.8866577148518, 453.0573425292903, 845.0, 398.9770812988263], "id": "ZllAa1541168769217"}]} \ No newline at end of file diff --git a/research/object_detection/tpu_exporters/export_saved_model_tpu_lib_test.py b/research/object_detection/tpu_exporters/export_saved_model_tpu_lib_tf1_test.py similarity index 95% rename from research/object_detection/tpu_exporters/export_saved_model_tpu_lib_test.py rename to research/object_detection/tpu_exporters/export_saved_model_tpu_lib_tf1_test.py index 4bbffed36..653535aa3 100644 --- 
a/research/object_detection/tpu_exporters/export_saved_model_tpu_lib_test.py +++ b/research/object_detection/tpu_exporters/export_saved_model_tpu_lib_tf1_test.py @@ -19,12 +19,14 @@ from __future__ import division from __future__ import print_function import os +import unittest from absl.testing import parameterized import numpy as np import tensorflow.compat.v1 as tf from object_detection.tpu_exporters import export_saved_model_tpu_lib +from object_detection.utils import tf_version flags = tf.app.flags FLAGS = flags.FLAGS @@ -35,6 +37,7 @@ def get_path(path_suffix): path_suffix) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class ExportSavedModelTPUTest(tf.test.TestCase, parameterized.TestCase): @parameterized.named_parameters( diff --git a/research/object_detection/utils/config_util_test.py b/research/object_detection/utils/config_util_test.py index cd5f87d8d..f36970c11 100644 --- a/research/object_detection/utils/config_util_test.py +++ b/research/object_detection/utils/config_util_test.py @@ -19,7 +19,7 @@ from __future__ import division from __future__ import print_function import os - +import unittest from six.moves import range import tensorflow.compat.v1 as tf @@ -32,6 +32,7 @@ from object_detection.protos import model_pb2 from object_detection.protos import pipeline_pb2 from object_detection.protos import train_pb2 from object_detection.utils import config_util +from object_detection.utils import tf_version # pylint: disable=g-import-not-at-top try: @@ -282,18 +283,22 @@ class ConfigUtilTest(tf.test.TestCase): self.assertAlmostEqual(hparams.learning_rate * warmup_scale_factor, cosine_lr.warmup_learning_rate) + @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.") def testRMSPropWithNewLearingRate(self): """Tests new learning rates for RMSProp Optimizer.""" self._assertOptimizerWithNewLearningRate("rms_prop_optimizer") + @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.") def testMomentumOptimizerWithNewLearningRate(self): """Tests new learning rates for Momentum Optimizer.""" self._assertOptimizerWithNewLearningRate("momentum_optimizer") + @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.") def testAdamOptimizerWithNewLearningRate(self): """Tests new learning rates for Adam Optimizer.""" self._assertOptimizerWithNewLearningRate("adam_optimizer") + @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.") def testGenericConfigOverride(self): """Tests generic config overrides for all top-level configs.""" # Set one parameter for each of the top-level pipeline configs: @@ -329,6 +334,7 @@ class ConfigUtilTest(tf.test.TestCase): self.assertEqual(2, configs["graph_rewriter_config"].quantization.weight_bits) + @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.") def testNewBatchSize(self): """Tests that batch size is updated appropriately.""" original_batch_size = 2 @@ -344,6 +350,7 @@ class ConfigUtilTest(tf.test.TestCase): new_batch_size = configs["train_config"].batch_size self.assertEqual(16, new_batch_size) + @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.") def testNewBatchSizeWithClipping(self): """Tests that batch size is clipped to 1 from below.""" original_batch_size = 2 @@ -359,6 +366,7 @@ class ConfigUtilTest(tf.test.TestCase): new_batch_size = configs["train_config"].batch_size self.assertEqual(1, new_batch_size) # Clipped to 1.0. 
+ @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.") def testOverwriteBatchSizeWithKeyValue(self): """Tests that batch size is overwritten based on key/value.""" pipeline_config = pipeline_pb2.TrainEvalPipelineConfig() @@ -369,6 +377,7 @@ class ConfigUtilTest(tf.test.TestCase): new_batch_size = configs["train_config"].batch_size self.assertEqual(10, new_batch_size) + @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.") def testKeyValueOverrideBadKey(self): """Tests that overwriting with a bad key causes an exception.""" pipeline_config = pipeline_pb2.TrainEvalPipelineConfig() @@ -377,6 +386,7 @@ class ConfigUtilTest(tf.test.TestCase): with self.assertRaises(ValueError): config_util.merge_external_params_with_configs(configs, hparams) + @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.") def testOverwriteBatchSizeWithBadValueType(self): """Tests that overwriting with a bad valuye type causes an exception.""" pipeline_config = pipeline_pb2.TrainEvalPipelineConfig() @@ -387,6 +397,7 @@ class ConfigUtilTest(tf.test.TestCase): with self.assertRaises(TypeError): config_util.merge_external_params_with_configs(configs, hparams) + @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.") def testNewMomentumOptimizerValue(self): """Tests that new momentum value is updated appropriately.""" original_momentum_value = 0.4 @@ -404,6 +415,7 @@ class ConfigUtilTest(tf.test.TestCase): new_momentum_value = optimizer_config.momentum_optimizer_value self.assertAlmostEqual(1.0, new_momentum_value) # Clipped to 1.0. + @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.") def testNewClassificationLocalizationWeightRatio(self): """Tests that the loss weight ratio is updated appropriately.""" original_localization_weight = 0.1 @@ -426,6 +438,7 @@ class ConfigUtilTest(tf.test.TestCase): self.assertAlmostEqual(1.0, loss.localization_weight) self.assertAlmostEqual(new_weight_ratio, loss.classification_weight) + @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.") def testNewFocalLossParameters(self): """Tests that the loss weight ratio is updated appropriately.""" original_alpha = 1.0 diff --git a/research/object_detection/utils/model_util_test.py b/research/object_detection/utils/model_util_tf2_test.py similarity index 94% rename from research/object_detection/utils/model_util_test.py rename to research/object_detection/utils/model_util_tf2_test.py index c505464c7..77b1d0172 100644 --- a/research/object_detection/utils/model_util_test.py +++ b/research/object_detection/utils/model_util_tf2_test.py @@ -19,11 +19,14 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function +import unittest import tensorflow.compat.v1 as tf from object_detection.utils import model_util +from object_detection.utils import tf_version +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') class ExtractSubmodelUtilTest(tf.test.TestCase): def test_simple_model(self): diff --git a/research/object_detection/utils/object_detection_evaluation_test.py b/research/object_detection/utils/object_detection_evaluation_test.py index 5b2b5c801..ff399ed4b 100644 --- a/research/object_detection/utils/object_detection_evaluation_test.py +++ b/research/object_detection/utils/object_detection_evaluation_test.py @@ -18,6 +18,8 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function + +import unittest from absl.testing import parameterized import numpy as np 
import six @@ -26,6 +28,7 @@ import tensorflow.compat.v1 as tf from object_detection import eval_util from object_detection.core import standard_fields from object_detection.utils import object_detection_evaluation +from object_detection.utils import tf_version class OpenImagesV2EvaluationTest(tf.test.TestCase): @@ -970,6 +973,8 @@ class ObjectDetectionEvaluationTest(tf.test.TestCase): self.assertAlmostEqual(copy_mean_corloc, mean_corloc) +@unittest.skipIf(tf_version.is_tf2(), 'Eval Metrics ops are supported in TF1.X ' + 'only.') class ObjectDetectionEvaluatorTest(tf.test.TestCase, parameterized.TestCase): def setUp(self): diff --git a/research/object_detection/utils/ops.py b/research/object_detection/utils/ops.py index f59881580..0cd83d38d 100644 --- a/research/object_detection/utils/ops.py +++ b/research/object_detection/utils/ops.py @@ -268,7 +268,7 @@ def padded_one_hot_encoding(indices, depth, left_pad): on_value=1, off_value=0), tf.float32) return tf.pad(one_hot, [[0, 0], [left_pad, 0]], mode='CONSTANT') result = tf.cond(tf.greater(tf.size(indices), 0), one_hot_and_pad, - lambda: tf.zeros((depth + left_pad, 0))) + lambda: tf.zeros((tf.size(indices), depth + left_pad))) return tf.reshape(result, [-1, depth + left_pad]) diff --git a/research/object_detection/utils/ops_test.py b/research/object_detection/utils/ops_test.py index a7a6f8df3..d4da7b107 100644 --- a/research/object_detection/utils/ops_test.py +++ b/research/object_detection/utils/ops_test.py @@ -196,8 +196,7 @@ class OpsTestPaddedOneHotEncoding(test_case.TestCase): [0, 0, 0, 1, 0, 0], [0, 0, 0, 0, 0, 1]], np.float32) - # Executing on CPU only because output shape is not constant. - out_one_hot_tensor = self.execute_cpu(graph_fn, []) + out_one_hot_tensor = self.execute(graph_fn, []) self.assertAllClose(out_one_hot_tensor, expected_tensor, rtol=1e-10, atol=1e-10) @@ -212,8 +211,7 @@ class OpsTestPaddedOneHotEncoding(test_case.TestCase): [0, 0, 0, 1, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0], [0, 0, 0, 0, 0, 0, 1]], np.float32) - # Executing on CPU only because output shape is not constant. - out_one_hot_tensor = self.execute_cpu(graph_fn, []) + out_one_hot_tensor = self.execute(graph_fn, []) self.assertAllClose(out_one_hot_tensor, expected_tensor, rtol=1e-10, atol=1e-10) @@ -229,8 +227,7 @@ class OpsTestPaddedOneHotEncoding(test_case.TestCase): [0, 0, 0, 0, 0, 0, 1, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 1]], np.float32) - # executing on CPU only because output shape is not constant. - out_one_hot_tensor = self.execute_cpu(graph_fn, []) + out_one_hot_tensor = self.execute(graph_fn, []) self.assertAllClose(out_one_hot_tensor, expected_tensor, rtol=1e-10, atol=1e-10) @@ -246,8 +243,7 @@ class OpsTestPaddedOneHotEncoding(test_case.TestCase): return one_hot_tensor expected_tensor = np.zeros((0, depth + pad)) - # executing on CPU only because output shape is not constant. - out_one_hot_tensor = self.execute_cpu(graph_fn, []) + out_one_hot_tensor = self.execute(graph_fn, []) self.assertAllClose(out_one_hot_tensor, expected_tensor, rtol=1e-10, atol=1e-10) diff --git a/research/object_detection/utils/target_assigner_utils.py b/research/object_detection/utils/target_assigner_utils.py index ca7918f3d..0aa26a47e 100644 --- a/research/object_detection/utils/target_assigner_utils.py +++ b/research/object_detection/utils/target_assigner_utils.py @@ -118,12 +118,17 @@ def compute_floor_offsets_with_indices(y_source, they were put on the grids) to target coordinates. 
Note that the input coordinates should be the "absolute" coordinates in terms of the output image dimensions as opposed to the normalized coordinates (i.e. values in [0, 1]). + If the input y and x source have the second dimension (representing the + neighboring pixels), then the offsets are computed from each of the + neighboring pixels to their corresponding target (first dimension). Args: - y_source: A tensor with shape [num_points] representing the absolute - y-coordinates (in the output image space) of the source points. - x_source: A tensor with shape [num_points] representing the absolute - x-coordinates (in the output image space) of the source points. + y_source: A tensor with shape [num_points] (or [num_points, num_neighbors]) + representing the absolute y-coordinates (in the output image space) of the + source points. + x_source: A tensor with shape [num_points] (or [num_points, num_neighbors]) + representing the absolute x-coordinates (in the output image space) of the + source points. y_target: A tensor with shape [num_points] representing the absolute y-coordinates (in the output image space) of the target points. If not provided, then y_source is used as the targets. @@ -133,18 +138,33 @@ def compute_floor_offsets_with_indices(y_source, Returns: A tuple of two tensors: - offsets: A tensor with shape [num_points, 2] representing the offsets of - each input point. - indices: A tensor with shape [num_points, 2] representing the indices of - where the offsets should be retrieved in the output image dimension - space. + offsets: A tensor with shape [num_points, 2] (or + [num_points, num_neighbors, 2]) representing the offsets of each input + point. + indices: A tensor with shape [num_points, 2] (or + [num_points, num_neighbors, 2]) representing the indices of where the + offsets should be retrieved in the output image dimension space. + + Raise: + ValueError: source and target shapes have unexpected values. """ y_source_floored = tf.floor(y_source) x_source_floored = tf.floor(x_source) - if y_target is None: + + source_shape = shape_utils.combined_static_and_dynamic_shape(y_source) + if y_target is None and x_target is None: y_target = y_source - if x_target is None: x_target = x_source + else: + target_shape = shape_utils.combined_static_and_dynamic_shape(y_target) + if len(source_shape) == 2 and len(target_shape) == 1: + _, num_neighbors = source_shape + y_target = tf.tile( + tf.expand_dims(y_target, -1), multiples=[1, num_neighbors]) + x_target = tf.tile( + tf.expand_dims(x_target, -1), multiples=[1, num_neighbors]) + elif source_shape != target_shape: + raise ValueError('Inconsistent source and target shape.') y_offset = y_target - y_source_floored x_offset = x_target - x_source_floored @@ -152,9 +172,8 @@ def compute_floor_offsets_with_indices(y_source, y_source_indices = tf.cast(y_source_floored, tf.int32) x_source_indices = tf.cast(x_source_floored, tf.int32) - indices = tf.stack([y_source_indices, x_source_indices], axis=1) - offsets = tf.stack([y_offset, x_offset], axis=1) - + indices = tf.stack([y_source_indices, x_source_indices], axis=-1) + offsets = tf.stack([y_offset, x_offset], axis=-1) return offsets, indices @@ -231,6 +250,12 @@ def blackout_pixel_weights_by_box_regions(height, width, boxes, blackout): A float tensor with shape [height, width] where all values within the regions of the blackout boxes are 0.0 and 1.0 else where. 
""" + num_instances, _ = shape_utils.combined_static_and_dynamic_shape(boxes) + # If no annotation instance is provided, return all ones (instead of + # unexpected values) to avoid NaN loss value. + if num_instances == 0: + return tf.ones([height, width], dtype=tf.float32) + (y_grid, x_grid) = image_shape_to_grids(height, width) y_grid = tf.expand_dims(y_grid, axis=0) x_grid = tf.expand_dims(x_grid, axis=0) @@ -257,3 +282,72 @@ def blackout_pixel_weights_by_box_regions(height, width, boxes, blackout): out_boxes = tf.reduce_max(selected_in_boxes, axis=0) out_boxes = tf.ones_like(out_boxes) - out_boxes return out_boxes + + +def _get_yx_indices_offset_by_radius(radius): + """Gets the y and x index offsets that are within the radius.""" + y_offsets = [] + x_offsets = [] + for y_offset in range(-radius, radius + 1, 1): + for x_offset in range(-radius, radius + 1, 1): + if x_offset ** 2 + y_offset ** 2 <= radius ** 2: + y_offsets.append(y_offset) + x_offsets.append(x_offset) + return (tf.constant(y_offsets, dtype=tf.float32), + tf.constant(x_offsets, dtype=tf.float32)) + + +def get_surrounding_grids(height, width, y_coordinates, x_coordinates, radius): + """Gets the indices of the surrounding pixels of the input y, x coordinates. + + This function returns the pixel indices corresponding to the (floor of the) + input coordinates and their surrounding pixels within the radius. If the + radius is set to 0, then only the pixels that correspond to the floor of the + coordinates will be returned. If the radius is larger than 0, then all of the + pixels within the radius of the "floor pixels" will also be returned. For + example, if the input coorindate is [2.1, 3.5] and radius is 1, then the five + pixel indices will be returned: [2, 3], [1, 3], [2, 2], [2, 4], [3, 3]. Also, + if the surrounding pixels are outside of valid image region, then the returned + pixel indices will be [0, 0] and its corresponding "valid" value will be + False. + + Args: + height: int, the height of the output image. + width: int, the width of the output image. + y_coordinates: A tensor with shape [num_points] representing the absolute + y-coordinates (in the output image space) of the points. + x_coordinates: A tensor with shape [num_points] representing the absolute + x-coordinates (in the output image space) of the points. + radius: int, the radius of the neighboring pixels to be considered and + returned. If set to 0, then only the pixel indices corresponding to the + floor of the input coordinates will be returned. + + Returns: + A tuple of three tensors: + y_indices: A [num_points, num_neighbors] float tensor representing the + pixel y indices corresponding to the input points within radius. The + "num_neighbors" is determined by the size of the radius. + x_indices: A [num_points, num_neighbors] float tensor representing the + pixel x indices corresponding to the input points within radius. The + "num_neighbors" is determined by the size of the radius. + valid: A [num_points, num_neighbors] boolean tensor representing whether + each returned index is in valid image region or not. + """ + # Floored y, x: [num_points, 1]. + y_center = tf.expand_dims(tf.math.floor(y_coordinates), axis=-1) + x_center = tf.expand_dims(tf.math.floor(x_coordinates), axis=-1) + y_offsets, x_offsets = _get_yx_indices_offset_by_radius(radius) + # Indices offsets: [1, num_neighbors]. + y_offsets = tf.expand_dims(y_offsets, axis=0) + x_offsets = tf.expand_dims(x_offsets, axis=0) + + # Floor + offsets: [num_points, num_neighbors]. 
+ y_output = y_center + y_offsets + x_output = x_center + x_offsets + default_output = tf.zeros_like(y_output) + valid = tf.logical_and( + tf.logical_and(x_output >= 0, x_output < width), + tf.logical_and(y_output >= 0, y_output < height)) + y_output = tf.where(valid, y_output, default_output) + x_output = tf.where(valid, x_output, default_output) + return (y_output, x_output, valid) diff --git a/research/object_detection/utils/target_assigner_utils_test.py b/research/object_detection/utils/target_assigner_utils_test.py index b895cca01..f66344532 100644 --- a/research/object_detection/utils/target_assigner_utils_test.py +++ b/research/object_detection/utils/target_assigner_utils_test.py @@ -87,8 +87,32 @@ class TargetUtilTest(test_case.TestCase): np.testing.assert_array_almost_equal(offsets, np.array([[1.1, -0.8], [0.1, 0.5]])) - np.testing.assert_array_almost_equal(indices, - np.array([[1, 2], [0, 4]])) + np.testing.assert_array_almost_equal(indices, np.array([[1, 2], [0, 4]])) + + def test_compute_floor_offsets_with_indices_multisources(self): + + def graph_fn(): + y_source = tf.constant([[1.0, 0.0], [2.0, 3.0]], dtype=tf.float32) + x_source = tf.constant([[2.0, 4.0], [3.0, 3.0]], dtype=tf.float32) + y_target = tf.constant([2.1, 0.1], dtype=tf.float32) + x_target = tf.constant([1.2, 4.5], dtype=tf.float32) + (offsets, indices) = ta_utils.compute_floor_offsets_with_indices( + y_source, x_source, y_target, x_target) + return offsets, indices + + offsets, indices = self.execute(graph_fn, []) + # Offset from the first source to target. + np.testing.assert_array_almost_equal(offsets[:, 0, :], + np.array([[1.1, -0.8], [-1.9, 1.5]])) + # Offset from the second source to target. + np.testing.assert_array_almost_equal(offsets[:, 1, :], + np.array([[2.1, -2.8], [-2.9, 1.5]])) + # Indices from the first source to target. + np.testing.assert_array_almost_equal(indices[:, 0, :], + np.array([[1, 2], [2, 3]])) + # Indices from the second source to target. + np.testing.assert_array_almost_equal(indices[:, 1, :], + np.array([[0, 4], [3, 3]])) def test_get_valid_keypoints_mask(self): @@ -174,6 +198,44 @@ class TargetUtilTest(test_case.TestCase): # 20 * 10 - 6 * 6 - 3 * 7 = 143.0 self.assertAlmostEqual(np.sum(output), 143.0) + def test_blackout_pixel_weights_by_box_regions_zero_instance(self): + def graph_fn(): + boxes = tf.zeros([0, 4], dtype=tf.float32) + blackout = tf.zeros([0], dtype=tf.bool) + blackout_pixel_weights_by_box_regions = tf.function( + ta_utils.blackout_pixel_weights_by_box_regions) + output = blackout_pixel_weights_by_box_regions(10, 20, boxes, blackout) + return output + + output = self.execute(graph_fn, []) + # The output should be all 1s since there's no annotation provided. + np.testing.assert_array_equal(output, np.ones([10, 20], dtype=np.float32)) + + def test_get_surrounding_grids(self): + + def graph_fn(): + y_coordinates = tf.constant([0.5], dtype=tf.float32) + x_coordinates = tf.constant([4.5], dtype=tf.float32) + output = ta_utils.get_surrounding_grids( + height=3, + width=5, + y_coordinates=y_coordinates, + x_coordinates=x_coordinates, + radius=1) + return output + + y_indices, x_indices, valid = self.execute(graph_fn, []) + + # Five neighboring indices: [-1, 4] (out of bound), [0, 3], [0, 4], + # [0, 5] (out of bound), [1, 4]. 
+ np.testing.assert_array_almost_equal( + y_indices, + np.array([[0.0, 0.0, 0.0, 0.0, 1.0]])) + np.testing.assert_array_almost_equal( + x_indices, + np.array([[0.0, 3.0, 4.0, 0.0, 4.0]])) + self.assertAllEqual(valid, [[False, True, True, False, True]]) + if __name__ == '__main__': tf.test.main() diff --git a/research/object_detection/utils/test_utils.py b/research/object_detection/utils/test_utils.py index f7e92c0bf..666a29adb 100644 --- a/research/object_detection/utils/test_utils.py +++ b/research/object_detection/utils/test_utils.py @@ -271,3 +271,19 @@ class GraphContextOrNone(object): return False else: return self.graph.__exit__(ttype, value, traceback) + + +def image_with_dynamic_shape(height, width, channels): + """Returns a single image with dynamic shape.""" + h = tf.random.uniform([], minval=height, maxval=height+1, dtype=tf.int32) + w = tf.random.uniform([], minval=width, maxval=width+1, dtype=tf.int32) + image = tf.random.uniform([h, w, channels]) + return image + + +def keypoints_with_dynamic_shape(num_instances, num_keypoints, num_coordinates): + """Returns keypoints with dynamic shape.""" + n = tf.random.uniform([], minval=num_instances, maxval=num_instances+1, + dtype=tf.int32) + keypoints = tf.random.uniform([n, num_keypoints, num_coordinates]) + return keypoints diff --git a/research/object_detection/utils/variables_helper.py b/research/object_detection/utils/variables_helper.py index 327f3b679..17b63eb70 100644 --- a/research/object_detection/utils/variables_helper.py +++ b/research/object_detection/utils/variables_helper.py @@ -47,8 +47,6 @@ def filter_variables(variables, filter_regex_list, invert=False): Returns: a list of filtered variables. """ - if tf.executing_eagerly(): - raise ValueError('Accessing variables is not supported in eager mode.') kept_vars = [] variables_to_ignore_patterns = list([fre for fre in filter_regex_list if fre]) for var in variables: @@ -74,8 +72,6 @@ def multiply_gradients_matching_regex(grads_and_vars, regex_list, multiplier): Returns: grads_and_vars: A list of gradient to variable pairs (tuples). """ - if tf.executing_eagerly(): - raise ValueError('Accessing variables is not supported in eager mode.') variables = [pair[1] for pair in grads_and_vars] matching_vars = filter_variables(variables, regex_list, invert=True) for var in matching_vars: @@ -97,8 +93,6 @@ def freeze_gradients_matching_regex(grads_and_vars, regex_list): grads_and_vars: A list of gradient to variable pairs (tuples) that do not contain the variables and gradients matching the regex. """ - if tf.executing_eagerly(): - raise ValueError('Accessing variables is not supported in eager mode.') variables = [pair[1] for pair in grads_and_vars] matching_vars = filter_variables(variables, regex_list, invert=True) kept_grads_and_vars = [pair for pair in grads_and_vars @@ -129,8 +123,6 @@ def get_variables_available_in_checkpoint(variables, Raises: ValueError: if `variables` is not a list or dict. 
""" - if tf.executing_eagerly(): - raise ValueError('Accessing variables is not supported in eager mode.') if isinstance(variables, list): variable_names_map = {} for variable in variables: @@ -178,8 +170,6 @@ def get_global_variables_safely(): Returns: The result of tf.global_variables() """ - if tf.executing_eagerly(): - raise ValueError('Accessing variables is not supported in eager mode.') with tf.init_scope(): if tf.executing_eagerly(): raise ValueError("Global variables collection is not tracked when " diff --git a/research/object_detection/utils/variables_helper_test.py b/research/object_detection/utils/variables_helper_tf1_test.py similarity index 96% rename from research/object_detection/utils/variables_helper_test.py rename to research/object_detection/utils/variables_helper_tf1_test.py index 44e72d0d1..a8bd43ed9 100644 --- a/research/object_detection/utils/variables_helper_test.py +++ b/research/object_detection/utils/variables_helper_tf1_test.py @@ -20,13 +20,15 @@ from __future__ import division from __future__ import print_function import os - +import unittest import tensorflow.compat.v1 as tf from object_detection.utils import test_case +from object_detection.utils import tf_version from object_detection.utils import variables_helper +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class FilterVariablesTest(test_case.TestCase): def _create_variables(self): @@ -68,6 +70,7 @@ class FilterVariablesTest(test_case.TestCase): self.assertCountEqual(out_variables, [variables[1], variables[3]]) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class MultiplyGradientsMatchingRegexTest(tf.test.TestCase): def _create_grads_and_vars(self): @@ -107,6 +110,7 @@ class MultiplyGradientsMatchingRegexTest(tf.test.TestCase): self.assertCountEqual(output, exp_output) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class FreezeGradientsMatchingRegexTest(test_case.TestCase): def _create_grads_and_vars(self): @@ -132,6 +136,7 @@ class FreezeGradientsMatchingRegexTest(test_case.TestCase): self.assertCountEqual(output, exp_output) +@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') class GetVariablesAvailableInCheckpointTest(test_case.TestCase): def test_return_all_variables_from_checkpoint(self): -- GitLab From e9df75ab4ae5f687d98d0f13d0c13cb10ab50ea7 Mon Sep 17 00:00:00 2001 From: kyscg Date: Wed, 17 Jun 2020 21:09:54 +0530 Subject: [PATCH 15/79] Added WikiTableQuestions link (#8677) --- research/neural_programmer/README.md | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/research/neural_programmer/README.md b/research/neural_programmer/README.md index 6101a85b9..dcc27f6fb 100644 --- a/research/neural_programmer/README.md +++ b/research/neural_programmer/README.md @@ -4,20 +4,23 @@ # Neural Programmer -Implementation of the Neural Programmer model described in [paper](https://openreview.net/pdf?id=ry2YOrcge) +Implementation of the Neural Programmer model as described in this [paper](https://openreview.net/pdf?id=ry2YOrcge). -Download and extract the data from [dropbox](https://www.dropbox.com/s/9tvtcv6lmy51zfw/data.zip?dl=0). Change the ``data_dir FLAG`` to the location of the data. +Download and extract the data from the [WikiTableQuestions](https://ppasupat.github.io/WikiTableQuestions/) site. The dataset contains +11321, 2831, and 4344 examples for training, development, and testing respectively. We use their tokenization, number and date pre-processing. 
Please note that the above paper used the [initial release](https://github.com/ppasupat/WikiTableQuestions/releases/tag/v0.2) for training, development and testing. + +Change the `data_dir FLAG` to the location of the data. ### Training -``python neural_programmer.py`` +Run `python neural_programmer.py` -The models are written to FLAGS.output_dir +The models are written to `FLAGS.output_dir`. ### Testing -``python neural_programmer.py --evaluator_job=True`` +Run `python neural_programmer.py --evaluator_job=True` -The models are loaded from ``FLAGS.output_dir``. The evaluation is done on development data. +The models are loaded from `FLAGS.output_dir`. The evaluation is done on development data. -In case of errors because of encoding, add ``"# -*- coding: utf-8 -*-"`` as the first line in ``wiki_data.py`` +In case of errors because of encoding, add `"# -*- coding: utf-8 -*-"` as the first line in `wiki_data.py` Maintained by Arvind Neelakantan (arvind2505) -- GitLab From 57e7ca73a636ddeb600b798cca272b6e7e2a873e Mon Sep 17 00:00:00 2001 From: "A. Unique TensorFlower" Date: Wed, 17 Jun 2020 10:26:39 -0700 Subject: [PATCH 16/79] Updating classifier_trainer MultiWorkerMirrored Strategy. PiperOrigin-RevId: 316915450 --- official/vision/image_classification/README.md | 18 ++++++++++++++++++ .../image_classification/classifier_trainer.py | 7 ++++--- 2 files changed, 22 insertions(+), 3 deletions(-) diff --git a/official/vision/image_classification/README.md b/official/vision/image_classification/README.md index c16fdc0c5..eb061d5b5 100644 --- a/official/vision/image_classification/README.md +++ b/official/vision/image_classification/README.md @@ -119,6 +119,24 @@ python3 classifier_trainer.py \ --params_override='runtime.num_gpus=$NUM_GPUS' ``` +To train on multiple hosts, each with GPUs attached using +[MultiWorkerMirroredStrategy](https://www.tensorflow.org/api_docs/python/tf/distribute/experimental/MultiWorkerMirroredStrategy) +please update `runtime` section in gpu.yaml +(or override using `--params_override`) with: + +```YAML +# gpu.yaml +runtime: + distribution_strategy: 'multi_worker_mirrored' + worker_hosts: '$HOST1:port,$HOST2:port' + num_gpus: $NUM_GPUS + task_index: 0 +``` +By having `task_index: 0` on the first host and `task_index: 1` on the second +and so on. `$HOST1` and `$HOST2` are the IP addresses of the hosts, and `port` +can be chosen any free port on the hosts. Only the first host will write +TensorBoard Summaries and save checkpoints. 
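For illustration, a minimal sketch of how these runtime fields are wired into a strategy, mirroring the `distribution_utils.configure_cluster` and `get_distribution_strategy` calls used by `classifier_trainer.py`; the host addresses, port numbers, and the `num_gpus` argument below are placeholder assumptions:

```python
from official.utils.misc import distribution_utils

worker_hosts = '10.0.0.1:2222,10.0.0.2:2222'  # placeholder host:port pairs
task_index = 0  # 0 on the first host, 1 on the second, and so on.

# Sets up the cluster information (TF_CONFIG) so that the
# multi_worker_mirrored strategy can discover the other workers.
distribution_utils.configure_cluster(worker_hosts, task_index)

strategy = distribution_utils.get_distribution_strategy(
    distribution_strategy='multi_worker_mirrored',
    num_gpus=1)  # assumed keyword, shown here only for illustration

with strategy.scope():
  # Build the model, optimizer, and metrics under the strategy scope.
  pass
```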
+ #### On TPU: ```bash python3 classifier_trainer.py \ diff --git a/official/vision/image_classification/classifier_trainer.py b/official/vision/image_classification/classifier_trainer.py index 639ce1b47..1e5ea468c 100644 --- a/official/vision/image_classification/classifier_trainer.py +++ b/official/vision/image_classification/classifier_trainer.py @@ -235,9 +235,6 @@ def initialize(params: base_configs.ExperimentConfig, else: data_format = 'channels_last' tf.keras.backend.set_image_data_format(data_format) - distribution_utils.configure_cluster( - params.runtime.worker_hosts, - params.runtime.task_index) if params.runtime.run_eagerly: # Enable eager execution to allow step-by-step debugging tf.config.experimental_run_functions_eagerly(True) @@ -296,6 +293,10 @@ def train_and_eval( """Runs the train and eval path using compile/fit.""" logging.info('Running train and eval.') + distribution_utils.configure_cluster( + params.runtime.worker_hosts, + params.runtime.task_index) + # Note: for TPUs, strategy and scope should be created before the dataset strategy = strategy_override or distribution_utils.get_distribution_strategy( distribution_strategy=params.runtime.distribution_strategy, -- GitLab From 0a6f6426ae511c0a09ae5fe2c8cafdbecacdbd1b Mon Sep 17 00:00:00 2001 From: Abdullah Rashwan Date: Wed, 17 Jun 2020 14:16:25 -0700 Subject: [PATCH 17/79] Internal change PiperOrigin-RevId: 316962972 --- .../configs/optimization_config.py | 2 + .../optimization/configs/optimizer_config.py | 23 ++++++ .../optimization/optimizer_factory.py | 4 +- .../optimization/optimizer_factory_test.py | 75 ++++--------------- 4 files changed, 41 insertions(+), 63 deletions(-) diff --git a/official/modeling/optimization/configs/optimization_config.py b/official/modeling/optimization/configs/optimization_config.py index 8aba9943a..1cf3616c7 100644 --- a/official/modeling/optimization/configs/optimization_config.py +++ b/official/modeling/optimization/configs/optimization_config.py @@ -39,12 +39,14 @@ class OptimizerConfig(oneof.OneOfConfig): adam: adam optimizer config. adamw: adam with weight decay. lamb: lamb optimizer. + rmsprop: rmsprop optimizer. """ type: Optional[str] = None sgd: opt_cfg.SGDConfig = opt_cfg.SGDConfig() adam: opt_cfg.AdamConfig = opt_cfg.AdamConfig() adamw: opt_cfg.AdamWeightDecayConfig = opt_cfg.AdamWeightDecayConfig() lamb: opt_cfg.LAMBConfig = opt_cfg.LAMBConfig() + rmsprop: opt_cfg.RMSPropConfig = opt_cfg.RMSPropConfig() @dataclasses.dataclass diff --git a/official/modeling/optimization/configs/optimizer_config.py b/official/modeling/optimization/configs/optimizer_config.py index 4cafa9659..6e2957774 100644 --- a/official/modeling/optimization/configs/optimizer_config.py +++ b/official/modeling/optimization/configs/optimizer_config.py @@ -40,6 +40,29 @@ class SGDConfig(base_config.Config): momentum: float = 0.0 +@dataclasses.dataclass +class RMSPropConfig(base_config.Config): + """Configuration for RMSProp optimizer. + + The attributes for this class matches the arguments of + tf.keras.optimizers.RMSprop. + + Attributes: + name: name of the optimizer. + learning_rate: learning_rate for RMSprop optimizer. + rho: discounting factor for RMSprop optimizer. + momentum: momentum for RMSprop optimizer. + epsilon: epsilon value for RMSprop optimizer, help with numerical stability. + centered: Whether to normalize gradients or not. 
+ """ + name: str = "RMSprop" + learning_rate: float = 0.001 + rho: float = 0.9 + momentum: float = 0.0 + epsilon: float = 1e-7 + centered: bool = False + + @dataclasses.dataclass class AdamConfig(base_config.Config): """Configuration for Adam optimizer. diff --git a/official/modeling/optimization/optimizer_factory.py b/official/modeling/optimization/optimizer_factory.py index 0988f6b3d..ccb03d50e 100644 --- a/official/modeling/optimization/optimizer_factory.py +++ b/official/modeling/optimization/optimizer_factory.py @@ -14,7 +14,6 @@ # limitations under the License. # ============================================================================== """Optimizer factory class.""" - from typing import Union import tensorflow as tf @@ -29,7 +28,8 @@ OPTIMIZERS_CLS = { 'sgd': tf.keras.optimizers.SGD, 'adam': tf.keras.optimizers.Adam, 'adamw': nlp_optimization.AdamWeightDecay, - 'lamb': tfa_optimizers.LAMB + 'lamb': tfa_optimizers.LAMB, + 'rmsprop': tf.keras.optimizers.RMSprop } LR_CLS = { diff --git a/official/modeling/optimization/optimizer_factory_test.py b/official/modeling/optimization/optimizer_factory_test.py index d7ffa16cf..6da76fec9 100644 --- a/official/modeling/optimization/optimizer_factory_test.py +++ b/official/modeling/optimization/optimizer_factory_test.py @@ -15,84 +15,37 @@ # ============================================================================== """Tests for optimizer_factory.py.""" +from absl.testing import parameterized + import tensorflow as tf -import tensorflow_addons.optimizers as tfa_optimizers from official.modeling.optimization import optimizer_factory from official.modeling.optimization.configs import optimization_config -from official.nlp import optimization as nlp_optimization - - -class OptimizerFactoryTest(tf.test.TestCase): - - def test_sgd_optimizer(self): - params = { - 'optimizer': { - 'type': 'sgd', - 'sgd': {'learning_rate': 0.1, 'momentum': 0.9} - } - } - expected_optimizer_config = { - 'name': 'SGD', - 'learning_rate': 0.1, - 'decay': 0.0, - 'momentum': 0.9, - 'nesterov': False - } - opt_config = optimization_config.OptimizationConfig(params) - opt_factory = optimizer_factory.OptimizerFactory(opt_config) - lr = opt_factory.build_learning_rate() - optimizer = opt_factory.build_optimizer(lr) - - self.assertIsInstance(optimizer, tf.keras.optimizers.SGD) - self.assertEqual(expected_optimizer_config, optimizer.get_config()) - - def test_adam_optimizer(self): - - # Define adam optimizer with default values. 
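For context, a hedged end-to-end sketch of how the newly registered `rmsprop` type can be exercised, following the same pattern as the parameterized test above; the nested config keys mirror `RMSPropConfig` and the numeric values are illustrative:

```python
from official.modeling.optimization import optimizer_factory
from official.modeling.optimization.configs import optimization_config

# Config structure follows the tests in this patch; values are illustrative.
params = {
    'optimizer': {
        'type': 'rmsprop',
        'rmsprop': {'rho': 0.9, 'momentum': 0.9},
    }
}
opt_config = optimization_config.OptimizationConfig(params)
opt_factory = optimizer_factory.OptimizerFactory(opt_config)
lr = opt_factory.build_learning_rate()
optimizer = opt_factory.build_optimizer(lr)  # -> tf.keras.optimizers.RMSprop
```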
- params = { - 'optimizer': { - 'type': 'adam' - } - } - expected_optimizer_config = tf.keras.optimizers.Adam().get_config() - opt_config = optimization_config.OptimizationConfig(params) - opt_factory = optimizer_factory.OptimizerFactory(opt_config) - lr = opt_factory.build_learning_rate() - optimizer = opt_factory.build_optimizer(lr) - self.assertIsInstance(optimizer, tf.keras.optimizers.Adam) - self.assertEqual(expected_optimizer_config, optimizer.get_config()) +class OptimizerFactoryTest(tf.test.TestCase, parameterized.TestCase): - def test_adam_weight_decay_optimizer(self): + @parameterized.parameters( + ('sgd'), + ('rmsprop'), + ('adam'), + ('adamw'), + ('lamb')) + def test_optimizers(self, optimizer_type): params = { 'optimizer': { - 'type': 'adamw' + 'type': optimizer_type } } - expected_optimizer_config = nlp_optimization.AdamWeightDecay().get_config() - opt_config = optimization_config.OptimizationConfig(params) - opt_factory = optimizer_factory.OptimizerFactory(opt_config) - lr = opt_factory.build_learning_rate() - optimizer = opt_factory.build_optimizer(lr) - - self.assertIsInstance(optimizer, nlp_optimization.AdamWeightDecay) - self.assertEqual(expected_optimizer_config, optimizer.get_config()) + optimizer_cls = optimizer_factory.OPTIMIZERS_CLS[optimizer_type] + expected_optimizer_config = optimizer_cls().get_config() - def test_lamb_optimizer(self): - params = { - 'optimizer': { - 'type': 'lamb' - } - } - expected_optimizer_config = tfa_optimizers.LAMB().get_config() opt_config = optimization_config.OptimizationConfig(params) opt_factory = optimizer_factory.OptimizerFactory(opt_config) lr = opt_factory.build_learning_rate() optimizer = opt_factory.build_optimizer(lr) - self.assertIsInstance(optimizer, tfa_optimizers.LAMB) + self.assertIsInstance(optimizer, optimizer_cls) self.assertEqual(expected_optimizer_config, optimizer.get_config()) def test_stepwise_lr_schedule(self): -- GitLab From a3263c0f74661fcb4d7b47ad8ada45673bd5f41e Mon Sep 17 00:00:00 2001 From: Pengchong Jin Date: Wed, 17 Jun 2020 18:43:35 -0700 Subject: [PATCH 18/79] Internal change PiperOrigin-RevId: 317010426 --- official/modeling/hyperparams/config_definitions.py | 1 - 1 file changed, 1 deletion(-) diff --git a/official/modeling/hyperparams/config_definitions.py b/official/modeling/hyperparams/config_definitions.py index 2fbcdea44..78180cd8a 100644 --- a/official/modeling/hyperparams/config_definitions.py +++ b/official/modeling/hyperparams/config_definitions.py @@ -198,7 +198,6 @@ class TaskConfig(base_config.Config): @dataclasses.dataclass class ExperimentConfig(base_config.Config): """Top-level configuration.""" - mode: str = "train" # train, eval, train_and_eval. task: TaskConfig = TaskConfig() trainer: TrainerConfig = TrainerConfig() runtime: RuntimeConfig = RuntimeConfig() -- GitLab From 21b73d22f3ed05b650e85ac50849408dd36de32e Mon Sep 17 00:00:00 2001 From: "A. 
Unique TensorFlower" Date: Wed, 17 Jun 2020 18:47:01 -0700 Subject: [PATCH 19/79] Internal change PiperOrigin-RevId: 317010998 --- official/nlp/configs/bert.py | 72 ++++---- official/nlp/configs/bert_test.py | 8 +- official/nlp/configs/encoders.py | 27 ++- .../nlp/modeling/models/bert_span_labeler.py | 6 + official/nlp/tasks/masked_lm.py | 2 +- official/nlp/tasks/question_answering.py | 167 ++++++++++++++++++ official/nlp/tasks/question_answering_test.py | 130 ++++++++++++++ official/nlp/tasks/sentence_prediction.py | 6 +- .../nlp/tasks/sentence_prediction_test.py | 53 +++--- 9 files changed, 412 insertions(+), 59 deletions(-) create mode 100644 official/nlp/tasks/question_answering.py create mode 100644 official/nlp/tasks/question_answering_test.py diff --git a/official/nlp/configs/bert.py b/official/nlp/configs/bert.py index 058af898f..0f6a74e31 100644 --- a/official/nlp/configs/bert.py +++ b/official/nlp/configs/bert.py @@ -13,7 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== -"""A multi-head BERT encoder network for pretraining.""" +"""Multi-head BERT encoder network with classification heads. + +Includes configurations and instantiation methods. +""" from typing import List, Optional, Text import dataclasses @@ -24,7 +27,6 @@ from official.modeling.hyperparams import base_config from official.modeling.hyperparams import config_definitions as cfg from official.nlp.configs import encoders from official.nlp.modeling import layers -from official.nlp.modeling import networks from official.nlp.modeling.models import bert_pretrainer @@ -47,43 +49,34 @@ class BertPretrainerConfig(base_config.Config): cls_heads: List[ClsHeadConfig] = dataclasses.field(default_factory=list) -def instantiate_from_cfg( +def instantiate_classification_heads_from_cfgs( + cls_head_configs: List[ClsHeadConfig]) -> List[layers.ClassificationHead]: + return [ + layers.ClassificationHead(**cfg.as_dict()) for cfg in cls_head_configs + ] if cls_head_configs else [] + + +def instantiate_bertpretrainer_from_cfg( config: BertPretrainerConfig, - encoder_network: Optional[tf.keras.Model] = None): + encoder_network: Optional[tf.keras.Model] = None + ) -> bert_pretrainer.BertPretrainerV2: """Instantiates a BertPretrainer from the config.""" encoder_cfg = config.encoder if encoder_network is None: - encoder_network = networks.TransformerEncoder( - vocab_size=encoder_cfg.vocab_size, - hidden_size=encoder_cfg.hidden_size, - num_layers=encoder_cfg.num_layers, - num_attention_heads=encoder_cfg.num_attention_heads, - intermediate_size=encoder_cfg.intermediate_size, - activation=tf_utils.get_activation(encoder_cfg.hidden_activation), - dropout_rate=encoder_cfg.dropout_rate, - attention_dropout_rate=encoder_cfg.attention_dropout_rate, - max_sequence_length=encoder_cfg.max_position_embeddings, - type_vocab_size=encoder_cfg.type_vocab_size, - initializer=tf.keras.initializers.TruncatedNormal( - stddev=encoder_cfg.initializer_range)) - if config.cls_heads: - classification_heads = [ - layers.ClassificationHead(**cfg.as_dict()) for cfg in config.cls_heads - ] - else: - classification_heads = [] + encoder_network = encoders.instantiate_encoder_from_cfg(encoder_cfg) return bert_pretrainer.BertPretrainerV2( config.num_masked_tokens, mlm_activation=tf_utils.get_activation(encoder_cfg.hidden_activation), mlm_initializer=tf.keras.initializers.TruncatedNormal( stddev=encoder_cfg.initializer_range), 
encoder_network=encoder_network, - classification_heads=classification_heads) + classification_heads=instantiate_classification_heads_from_cfgs( + config.cls_heads)) @dataclasses.dataclass class BertPretrainDataConfig(cfg.DataConfig): - """Data config for BERT pretraining task.""" + """Data config for BERT pretraining task (tasks/masked_lm).""" input_path: str = "" global_batch_size: int = 512 is_training: bool = True @@ -95,15 +88,15 @@ class BertPretrainDataConfig(cfg.DataConfig): @dataclasses.dataclass class BertPretrainEvalDataConfig(BertPretrainDataConfig): - """Data config for the eval set in BERT pretraining task.""" + """Data config for the eval set in BERT pretraining task (tasks/masked_lm).""" input_path: str = "" global_batch_size: int = 512 is_training: bool = False @dataclasses.dataclass -class BertSentencePredictionDataConfig(cfg.DataConfig): - """Data of sentence prediction dataset.""" +class SentencePredictionDataConfig(cfg.DataConfig): + """Data config for sentence prediction task (tasks/sentence_prediction).""" input_path: str = "" global_batch_size: int = 32 is_training: bool = True @@ -111,10 +104,29 @@ class BertSentencePredictionDataConfig(cfg.DataConfig): @dataclasses.dataclass -class BertSentencePredictionDevDataConfig(cfg.DataConfig): - """Dev data of MNLI sentence prediction dataset.""" +class SentencePredictionDevDataConfig(cfg.DataConfig): + """Dev Data config for sentence prediction (tasks/sentence_prediction).""" input_path: str = "" global_batch_size: int = 32 is_training: bool = False seq_length: int = 128 drop_remainder: bool = False + + +@dataclasses.dataclass +class QADataConfig(cfg.DataConfig): + """Data config for question answering task (tasks/question_answering).""" + input_path: str = "" + global_batch_size: int = 48 + is_training: bool = True + seq_length: int = 384 + + +@dataclasses.dataclass +class QADevDataConfig(cfg.DataConfig): + """Dev Data config for queston answering (tasks/question_answering).""" + input_path: str = "" + global_batch_size: int = 48 + is_training: bool = False + seq_length: int = 384 + drop_remainder: bool = False diff --git a/official/nlp/configs/bert_test.py b/official/nlp/configs/bert_test.py index 199608cd0..c734b190e 100644 --- a/official/nlp/configs/bert_test.py +++ b/official/nlp/configs/bert_test.py @@ -26,7 +26,7 @@ class BertModelsTest(tf.test.TestCase): def test_network_invocation(self): config = bert.BertPretrainerConfig( encoder=encoders.TransformerEncoderConfig(vocab_size=10, num_layers=1)) - _ = bert.instantiate_from_cfg(config) + _ = bert.instantiate_bertpretrainer_from_cfg(config) # Invokes with classification heads. 
config = bert.BertPretrainerConfig( @@ -35,7 +35,7 @@ class BertModelsTest(tf.test.TestCase): bert.ClsHeadConfig( inner_dim=10, num_classes=2, name="next_sentence") ]) - _ = bert.instantiate_from_cfg(config) + _ = bert.instantiate_bertpretrainer_from_cfg(config) with self.assertRaises(ValueError): config = bert.BertPretrainerConfig( @@ -47,7 +47,7 @@ class BertModelsTest(tf.test.TestCase): bert.ClsHeadConfig( inner_dim=10, num_classes=2, name="next_sentence") ]) - _ = bert.instantiate_from_cfg(config) + _ = bert.instantiate_bertpretrainer_from_cfg(config) def test_checkpoint_items(self): config = bert.BertPretrainerConfig( @@ -56,7 +56,7 @@ class BertModelsTest(tf.test.TestCase): bert.ClsHeadConfig( inner_dim=10, num_classes=2, name="next_sentence") ]) - encoder = bert.instantiate_from_cfg(config) + encoder = bert.instantiate_bertpretrainer_from_cfg(config) self.assertSameElements(encoder.checkpoint_items.keys(), ["encoder", "next_sentence.pooler_dense"]) diff --git a/official/nlp/configs/encoders.py b/official/nlp/configs/encoders.py index 146879a95..2b9c7e95b 100644 --- a/official/nlp/configs/encoders.py +++ b/official/nlp/configs/encoders.py @@ -13,11 +13,17 @@ # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== -"""Configurations for Encoders.""" +"""Transformer Encoders. + +Includes configurations and instantiation methods. +""" import dataclasses +import tensorflow as tf +from official.modeling import tf_utils from official.modeling.hyperparams import base_config +from official.nlp.modeling import networks @dataclasses.dataclass @@ -34,3 +40,22 @@ class TransformerEncoderConfig(base_config.Config): max_position_embeddings: int = 512 type_vocab_size: int = 2 initializer_range: float = 0.02 + + +def instantiate_encoder_from_cfg( + config: TransformerEncoderConfig) -> networks.TransformerEncoder: + """Instantiate a Transformer encoder network from TransformerEncoderConfig.""" + encoder_network = networks.TransformerEncoder( + vocab_size=config.vocab_size, + hidden_size=config.hidden_size, + num_layers=config.num_layers, + num_attention_heads=config.num_attention_heads, + intermediate_size=config.intermediate_size, + activation=tf_utils.get_activation(config.hidden_activation), + dropout_rate=config.dropout_rate, + attention_dropout_rate=config.attention_dropout_rate, + max_sequence_length=config.max_position_embeddings, + type_vocab_size=config.type_vocab_size, + initializer=tf.keras.initializers.TruncatedNormal( + stddev=config.initializer_range)) + return encoder_network diff --git a/official/nlp/modeling/models/bert_span_labeler.py b/official/nlp/modeling/models/bert_span_labeler.py index 9cc8d6244..2dd9ab13f 100644 --- a/official/nlp/modeling/models/bert_span_labeler.py +++ b/official/nlp/modeling/models/bert_span_labeler.py @@ -51,11 +51,13 @@ class BertSpanLabeler(tf.keras.Model): output='logits', **kwargs): self._self_setattr_tracking = False + self._network = network self._config = { 'network': network, 'initializer': initializer, 'output': output, } + # We want to use the inputs of the passed network as the inputs to this # Model. To do this, we need to keep a handle to the network inputs for use # when we construct the Model object at the end of init. 
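To see how these pieces fit together, a minimal sketch of building an encoder with the new `instantiate_encoder_from_cfg` helper, wrapping it in a `BertSpanLabeler`, and warm-starting it through the `checkpoint_items` property added in the next hunk; the checkpoint directory and encoder sizes are placeholder assumptions:

```python
import tensorflow as tf

from official.nlp.configs import encoders
from official.nlp.modeling import models

# Build a small encoder directly from the config-driven helper.
encoder_cfg = encoders.TransformerEncoderConfig(vocab_size=30522, num_layers=1)
encoder_network = encoders.instantiate_encoder_from_cfg(encoder_cfg)

span_labeler = models.BertSpanLabeler(
    network=encoder_network,
    initializer=tf.keras.initializers.TruncatedNormal(
        stddev=encoder_cfg.initializer_range))

# checkpoint_items exposes only the encoder, so a pretraining checkpoint can
# be partially restored into the fine-tuning model.
ckpt_path = tf.train.latest_checkpoint('/tmp/pretrain_ckpt_dir')  # placeholder
if ckpt_path:
  ckpt = tf.train.Checkpoint(**span_labeler.checkpoint_items)
  status = ckpt.restore(ckpt_path)
  status.expect_partial().assert_existing_objects_matched()
```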
@@ -89,6 +91,10 @@ class BertSpanLabeler(tf.keras.Model): super(BertSpanLabeler, self).__init__( inputs=inputs, outputs=logits, **kwargs) + @property + def checkpoint_items(self): + return dict(encoder=self._network) + def get_config(self): return self._config diff --git a/official/nlp/tasks/masked_lm.py b/official/nlp/tasks/masked_lm.py index 1679c09a0..803b4bca9 100644 --- a/official/nlp/tasks/masked_lm.py +++ b/official/nlp/tasks/masked_lm.py @@ -40,7 +40,7 @@ class MaskedLMTask(base_task.Task): """Mock task object for testing.""" def build_model(self): - return bert.instantiate_from_cfg(self.task_config.network) + return bert.instantiate_bertpretrainer_from_cfg(self.task_config.network) def build_losses(self, labels, diff --git a/official/nlp/tasks/question_answering.py b/official/nlp/tasks/question_answering.py new file mode 100644 index 000000000..c6bc58d96 --- /dev/null +++ b/official/nlp/tasks/question_answering.py @@ -0,0 +1,167 @@ +# Lint as: python3 +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Question answering task.""" +import logging +import dataclasses +import tensorflow as tf +import tensorflow_hub as hub + +from official.core import base_task +from official.modeling.hyperparams import config_definitions as cfg +from official.nlp.bert import input_pipeline +from official.nlp.configs import encoders +from official.nlp.modeling import models + + +@dataclasses.dataclass +class QuestionAnsweringConfig(cfg.TaskConfig): + """The model config.""" + # At most one of `init_checkpoint` and `hub_module_url` can be specified. + init_checkpoint: str = '' + hub_module_url: str = '' + network: encoders.TransformerEncoderConfig = ( + encoders.TransformerEncoderConfig()) + train_data: cfg.DataConfig = cfg.DataConfig() + validation_data: cfg.DataConfig = cfg.DataConfig() + + +@base_task.register_task_cls(QuestionAnsweringConfig) +class QuestionAnsweringTask(base_task.Task): + """Task object for question answering. + + TODO(lehou): Add post-processing. + """ + + def __init__(self, params=cfg.TaskConfig): + super(QuestionAnsweringTask, self).__init__(params) + if params.hub_module_url and params.init_checkpoint: + raise ValueError('At most one of `hub_module_url` and ' + '`init_checkpoint` can be specified.') + if params.hub_module_url: + self._hub_module = hub.load(params.hub_module_url) + else: + self._hub_module = None + + def build_model(self): + if self._hub_module: + # TODO(lehou): maybe add the hub_module building logic to a util function. 
+ input_word_ids = tf.keras.layers.Input( + shape=(None,), dtype=tf.int32, name='input_word_ids') + input_mask = tf.keras.layers.Input( + shape=(None,), dtype=tf.int32, name='input_mask') + input_type_ids = tf.keras.layers.Input( + shape=(None,), dtype=tf.int32, name='input_type_ids') + bert_model = hub.KerasLayer(self._hub_module, trainable=True) + pooled_output, sequence_output = bert_model( + [input_word_ids, input_mask, input_type_ids]) + encoder_network = tf.keras.Model( + inputs=[input_word_ids, input_mask, input_type_ids], + outputs=[sequence_output, pooled_output]) + else: + encoder_network = encoders.instantiate_encoder_from_cfg( + self.task_config.network) + + return models.BertSpanLabeler( + network=encoder_network, + initializer=tf.keras.initializers.TruncatedNormal( + stddev=self.task_config.network.initializer_range)) + + def build_losses(self, labels, model_outputs, aux_losses=None) -> tf.Tensor: + start_positions = labels['start_positions'] + end_positions = labels['end_positions'] + start_logits, end_logits = model_outputs + + start_loss = tf.keras.losses.sparse_categorical_crossentropy( + start_positions, + tf.cast(start_logits, dtype=tf.float32), + from_logits=True) + end_loss = tf.keras.losses.sparse_categorical_crossentropy( + end_positions, + tf.cast(end_logits, dtype=tf.float32), + from_logits=True) + + loss = (tf.reduce_mean(start_loss) + tf.reduce_mean(end_loss)) / 2 + return loss + + def build_inputs(self, params, input_context=None): + """Returns tf.data.Dataset for sentence_prediction task.""" + if params.input_path == 'dummy': + def dummy_data(_): + dummy_ids = tf.zeros((1, params.seq_length), dtype=tf.int32) + x = dict( + input_word_ids=dummy_ids, + input_mask=dummy_ids, + input_type_ids=dummy_ids) + y = dict( + start_positions=tf.constant(0, dtype=tf.int32), + end_positions=tf.constant(1, dtype=tf.int32)) + return (x, y) + + dataset = tf.data.Dataset.range(1) + dataset = dataset.repeat() + dataset = dataset.map( + dummy_data, num_parallel_calls=tf.data.experimental.AUTOTUNE) + return dataset + + batch_size = input_context.get_per_replica_batch_size( + params.global_batch_size) if input_context else params.global_batch_size + # TODO(chendouble): add and use nlp.data.question_answering_dataloader. + dataset = input_pipeline.create_squad_dataset( + params.input_path, + params.seq_length, + batch_size, + is_training=params.is_training, + input_pipeline_context=input_context) + return dataset + + def build_metrics(self, training=None): + del training + # TODO(lehou): a list of metrics doesn't work the same as in compile/fit. + metrics = [ + tf.keras.metrics.SparseCategoricalAccuracy( + name='start_position_accuracy'), + tf.keras.metrics.SparseCategoricalAccuracy( + name='end_position_accuracy'), + ] + return metrics + + def process_metrics(self, metrics, labels, model_outputs): + metrics = dict([(metric.name, metric) for metric in metrics]) + start_logits, end_logits = model_outputs + metrics['start_position_accuracy'].update_state( + labels['start_positions'], start_logits) + metrics['end_position_accuracy'].update_state( + labels['end_positions'], end_logits) + + def process_compiled_metrics(self, compiled_metrics, labels, model_outputs): + start_logits, end_logits = model_outputs + compiled_metrics.update_state( + y_true=labels, # labels has keys 'start_positions' and 'end_positions'. 
+ y_pred={'start_positions': start_logits, 'end_positions': end_logits}) + + def initialize(self, model): + """Load a pretrained checkpoint (if exists) and then train from iter 0.""" + ckpt_dir_or_file = self.task_config.init_checkpoint + if tf.io.gfile.isdir(ckpt_dir_or_file): + ckpt_dir_or_file = tf.train.latest_checkpoint(ckpt_dir_or_file) + if not ckpt_dir_or_file: + return + + ckpt = tf.train.Checkpoint(**model.checkpoint_items) + status = ckpt.restore(ckpt_dir_or_file) + status.expect_partial().assert_existing_objects_matched() + logging.info('finished loading pretrained checkpoint from %s', + ckpt_dir_or_file) diff --git a/official/nlp/tasks/question_answering_test.py b/official/nlp/tasks/question_answering_test.py new file mode 100644 index 000000000..8e0f3f10b --- /dev/null +++ b/official/nlp/tasks/question_answering_test.py @@ -0,0 +1,130 @@ +# Lint as: python3 +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Tests for official.nlp.tasks.question_answering.""" +import functools +import os +import tensorflow as tf + +from official.nlp.bert import configs +from official.nlp.bert import export_tfhub +from official.nlp.configs import bert +from official.nlp.configs import encoders +from official.nlp.tasks import question_answering + + +class QuestionAnsweringTaskTest(tf.test.TestCase): + + def setUp(self): + super(QuestionAnsweringTaskTest, self).setUp() + self._encoder_config = encoders.TransformerEncoderConfig( + vocab_size=30522, num_layers=1) + self._train_data_config = bert.QADataConfig( + input_path="dummy", seq_length=128, global_batch_size=1) + + def _run_task(self, config): + task = question_answering.QuestionAnsweringTask(config) + model = task.build_model() + metrics = task.build_metrics() + + strategy = tf.distribute.get_strategy() + dataset = strategy.experimental_distribute_datasets_from_function( + functools.partial(task.build_inputs, config.train_data)) + + iterator = iter(dataset) + optimizer = tf.keras.optimizers.SGD(lr=0.1) + task.train_step(next(iterator), model, optimizer, metrics=metrics) + task.validation_step(next(iterator), model, metrics=metrics) + + def test_task(self): + # Saves a checkpoint. 
+ pretrain_cfg = bert.BertPretrainerConfig( + encoder=self._encoder_config, + num_masked_tokens=20, + cls_heads=[ + bert.ClsHeadConfig( + inner_dim=10, num_classes=3, name="next_sentence") + ]) + pretrain_model = bert.instantiate_bertpretrainer_from_cfg(pretrain_cfg) + ckpt = tf.train.Checkpoint( + model=pretrain_model, **pretrain_model.checkpoint_items) + saved_path = ckpt.save(self.get_temp_dir()) + + config = question_answering.QuestionAnsweringConfig( + init_checkpoint=saved_path, + network=self._encoder_config, + train_data=self._train_data_config) + task = question_answering.QuestionAnsweringTask(config) + model = task.build_model() + metrics = task.build_metrics() + dataset = task.build_inputs(config.train_data) + + iterator = iter(dataset) + optimizer = tf.keras.optimizers.SGD(lr=0.1) + task.train_step(next(iterator), model, optimizer, metrics=metrics) + task.validation_step(next(iterator), model, metrics=metrics) + task.initialize(model) + + def test_task_with_fit(self): + config = question_answering.QuestionAnsweringConfig( + network=self._encoder_config, + train_data=self._train_data_config) + task = question_answering.QuestionAnsweringTask(config) + model = task.build_model() + model = task.compile_model( + model, + optimizer=tf.keras.optimizers.SGD(lr=0.1), + train_step=task.train_step, + metrics=[tf.keras.metrics.SparseCategoricalAccuracy(name="accuracy")]) + dataset = task.build_inputs(config.train_data) + logs = model.fit(dataset, epochs=1, steps_per_epoch=2) + self.assertIn("loss", logs.history) + self.assertIn("start_positions_accuracy", logs.history) + self.assertIn("end_positions_accuracy", logs.history) + + def _export_bert_tfhub(self): + bert_config = configs.BertConfig( + vocab_size=30522, + hidden_size=16, + intermediate_size=32, + max_position_embeddings=128, + num_attention_heads=2, + num_hidden_layers=1) + _, encoder = export_tfhub.create_bert_model(bert_config) + model_checkpoint_dir = os.path.join(self.get_temp_dir(), "checkpoint") + checkpoint = tf.train.Checkpoint(model=encoder) + checkpoint.save(os.path.join(model_checkpoint_dir, "test")) + model_checkpoint_path = tf.train.latest_checkpoint(model_checkpoint_dir) + + vocab_file = os.path.join(self.get_temp_dir(), "uncased_vocab.txt") + with tf.io.gfile.GFile(vocab_file, "w") as f: + f.write("dummy content") + + hub_destination = os.path.join(self.get_temp_dir(), "hub") + export_tfhub.export_bert_tfhub(bert_config, model_checkpoint_path, + hub_destination, vocab_file) + return hub_destination + + def test_task_with_hub(self): + hub_module_url = self._export_bert_tfhub() + config = question_answering.QuestionAnsweringConfig( + hub_module_url=hub_module_url, + network=self._encoder_config, + train_data=self._train_data_config) + self._run_task(config) + + +if __name__ == "__main__": + tf.test.main() diff --git a/official/nlp/tasks/sentence_prediction.py b/official/nlp/tasks/sentence_prediction.py index beebbdbad..0f16ecec1 100644 --- a/official/nlp/tasks/sentence_prediction.py +++ b/official/nlp/tasks/sentence_prediction.py @@ -34,7 +34,7 @@ class SentencePredictionConfig(cfg.TaskConfig): init_checkpoint: str = '' hub_module_url: str = '' network: bert.BertPretrainerConfig = bert.BertPretrainerConfig( - num_masked_tokens=0, + num_masked_tokens=0, # No masked language modeling head. 
cls_heads=[ bert.ClsHeadConfig( inner_dim=768, @@ -74,10 +74,10 @@ class SentencePredictionTask(base_task.Task): encoder_from_hub = tf.keras.Model( inputs=[input_word_ids, input_mask, input_type_ids], outputs=[sequence_output, pooled_output]) - return bert.instantiate_from_cfg( + return bert.instantiate_bertpretrainer_from_cfg( self.task_config.network, encoder_network=encoder_from_hub) else: - return bert.instantiate_from_cfg(self.task_config.network) + return bert.instantiate_bertpretrainer_from_cfg(self.task_config.network) def build_losses(self, labels, model_outputs, aux_losses=None) -> tf.Tensor: loss = loss_lib.weighted_sparse_categorical_crossentropy_loss( diff --git a/official/nlp/tasks/sentence_prediction_test.py b/official/nlp/tasks/sentence_prediction_test.py index e68db0a1a..fc7676333 100644 --- a/official/nlp/tasks/sentence_prediction_test.py +++ b/official/nlp/tasks/sentence_prediction_test.py @@ -27,6 +27,19 @@ from official.nlp.tasks import sentence_prediction class SentencePredictionTaskTest(tf.test.TestCase): + def setUp(self): + super(SentencePredictionTaskTest, self).setUp() + self._network_config = bert.BertPretrainerConfig( + encoder=encoders.TransformerEncoderConfig( + vocab_size=30522, num_layers=1), + num_masked_tokens=0, + cls_heads=[ + bert.ClsHeadConfig( + inner_dim=10, num_classes=3, name="sentence_prediction") + ]) + self._train_data_config = bert.SentencePredictionDataConfig( + input_path="dummy", seq_length=128, global_batch_size=1) + def _run_task(self, config): task = sentence_prediction.SentencePredictionTask(config) model = task.build_model() @@ -44,16 +57,8 @@ class SentencePredictionTaskTest(tf.test.TestCase): def test_task(self): config = sentence_prediction.SentencePredictionConfig( init_checkpoint=self.get_temp_dir(), - network=bert.BertPretrainerConfig( - encoder=encoders.TransformerEncoderConfig( - vocab_size=30522, num_layers=1), - num_masked_tokens=0, - cls_heads=[ - bert.ClsHeadConfig( - inner_dim=10, num_classes=3, name="sentence_prediction") - ]), - train_data=bert.BertSentencePredictionDataConfig( - input_path="dummy", seq_length=128, global_batch_size=1)) + network=self._network_config, + train_data=self._train_data_config) task = sentence_prediction.SentencePredictionTask(config) model = task.build_model() metrics = task.build_metrics() @@ -73,12 +78,27 @@ class SentencePredictionTaskTest(tf.test.TestCase): bert.ClsHeadConfig( inner_dim=10, num_classes=3, name="next_sentence") ]) - pretrain_model = bert.instantiate_from_cfg(pretrain_cfg) + pretrain_model = bert.instantiate_bertpretrainer_from_cfg(pretrain_cfg) ckpt = tf.train.Checkpoint( model=pretrain_model, **pretrain_model.checkpoint_items) ckpt.save(config.init_checkpoint) task.initialize(model) + def test_task_with_fit(self): + config = sentence_prediction.SentencePredictionConfig( + network=self._network_config, + train_data=self._train_data_config) + task = sentence_prediction.SentencePredictionTask(config) + model = task.build_model() + model = task.compile_model( + model, + optimizer=tf.keras.optimizers.SGD(lr=0.1), + train_step=task.train_step, + metrics=task.build_metrics()) + dataset = task.build_inputs(config.train_data) + logs = model.fit(dataset, epochs=1, steps_per_epoch=2) + self.assertIn("loss", logs.history) + def _export_bert_tfhub(self): bert_config = configs.BertConfig( vocab_size=30522, @@ -106,15 +126,8 @@ class SentencePredictionTaskTest(tf.test.TestCase): hub_module_url = self._export_bert_tfhub() config = sentence_prediction.SentencePredictionConfig( 
hub_module_url=hub_module_url, - network=bert.BertPretrainerConfig( - encoders.TransformerEncoderConfig(vocab_size=30522, num_layers=1), - num_masked_tokens=0, - cls_heads=[ - bert.ClsHeadConfig( - inner_dim=10, num_classes=3, name="sentence_prediction") - ]), - train_data=bert.BertSentencePredictionDataConfig( - input_path="dummy", seq_length=128, global_batch_size=10)) + network=self._network_config, + train_data=self._train_data_config) self._run_task(config) -- GitLab From bdf6f121bdb0c3b0e519d09f3fecb48f9caf98d3 Mon Sep 17 00:00:00 2001 From: Jaeyoun Kim Date: Wed, 17 Jun 2020 19:56:43 -0700 Subject: [PATCH 20/79] Update README.md Add MobileDets to the Announcements section --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index d9dbe91dd..3ff84454a 100644 --- a/README.md +++ b/README.md @@ -15,6 +15,7 @@ The TensorFlow Model Garden is a repository with a number of different implement | Date | News | |------|------| | May 21, 2020 | [Unifying Deep Local and Global Features for Image Search (DELG)](https://github.com/tensorflow/models/tree/master/research/delf#delg) code released +| May 19, 2020 | [MobileDets: Searching for Object Detection Architectures for Mobile Accelerators](https://github.com/tensorflow/models/tree/master/research/object_detection#may-19th-2020) released | May 7, 2020 | [MnasFPN with MobileNet-V2 backbone](https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/detection_model_zoo.md#mobile-models) released for object detection | May 1, 2020 | [DELF: DEep Local Features](https://github.com/tensorflow/models/tree/master/research/delf) updated to support TensorFlow 2.1 | March 31, 2020 | [Introducing the Model Garden for TensorFlow 2](https://blog.tensorflow.org/2020/03/introducing-model-garden-for-tensorflow-2.html) ([Tweet](https://twitter.com/TensorFlow/status/1245029834633297921)) | -- GitLab From 3e44a9d634fccee48eeb97c5939eb568b0db7fe2 Mon Sep 17 00:00:00 2001 From: Hongkun Yu Date: Wed, 17 Jun 2020 20:12:15 -0700 Subject: [PATCH 21/79] Internal change PiperOrigin-RevId: 317020610 --- official/nlp/tasks/masked_lm.py | 13 +++++-------- official/nlp/tasks/sentence_prediction.py | 2 +- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/official/nlp/tasks/masked_lm.py b/official/nlp/tasks/masked_lm.py index 803b4bca9..4d392ad11 100644 --- a/official/nlp/tasks/masked_lm.py +++ b/official/nlp/tasks/masked_lm.py @@ -48,23 +48,20 @@ class MaskedLMTask(base_task.Task): metrics, aux_losses=None) -> tf.Tensor: metrics = dict([(metric.name, metric) for metric in metrics]) - lm_output = tf.nn.log_softmax(model_outputs['lm_output'], axis=-1) + lm_output = tf.nn.log_softmax( + tf.cast(model_outputs['lm_output'], tf.float32), axis=-1) mlm_loss = loss_lib.weighted_sparse_categorical_crossentropy_loss( labels=labels['masked_lm_ids'], predictions=lm_output, weights=labels['masked_lm_weights']) metrics['lm_example_loss'].update_state(mlm_loss) if 'next_sentence_labels' in labels: - policy = tf.keras.mixed_precision.experimental.global_policy() - if policy.name == 'mixed_bfloat16': # b/158514794: bf16 is not stable. 
- policy = tf.float32 - predictions = tf.keras.layers.Activation( - tf.nn.log_softmax, dtype=policy)(model_outputs['next_sentence']) - sentence_labels = labels['next_sentence_labels'] + sentence_outputs = tf.cast( + model_outputs['next_sentence'], dtype=tf.float32) sentence_loss = loss_lib.weighted_sparse_categorical_crossentropy_loss( labels=sentence_labels, - predictions=predictions) + predictions=tf.nn.log_softmax(sentence_outputs, axis=-1)) metrics['next_sentence_loss'].update_state(sentence_loss) total_loss = mlm_loss + sentence_loss else: diff --git a/official/nlp/tasks/sentence_prediction.py b/official/nlp/tasks/sentence_prediction.py index 0f16ecec1..d3d3d0034 100644 --- a/official/nlp/tasks/sentence_prediction.py +++ b/official/nlp/tasks/sentence_prediction.py @@ -83,7 +83,7 @@ class SentencePredictionTask(base_task.Task): loss = loss_lib.weighted_sparse_categorical_crossentropy_loss( labels=labels, predictions=tf.nn.log_softmax( - model_outputs['sentence_prediction'], axis=-1)) + tf.cast(model_outputs['sentence_prediction'], tf.float32), axis=-1)) if aux_losses: loss += tf.add_n(aux_losses) -- GitLab From 20f31bf6e68dd221227230c826fd16a3640db2a7 Mon Sep 17 00:00:00 2001 From: moneypi <1483586698@qq.com> Date: Thu, 18 Jun 2020 12:55:10 +0800 Subject: [PATCH 22/79] fix "No module named 'official.utils.logs'" for deep_speech (#8687) --- research/deep_speech/deep_speech.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/research/deep_speech/deep_speech.py b/research/deep_speech/deep_speech.py index 526f0fc25..6af2ac0b8 100644 --- a/research/deep_speech/deep_speech.py +++ b/research/deep_speech/deep_speech.py @@ -28,8 +28,8 @@ import data.dataset as dataset import decoder import deep_speech_model from official.utils.flags import core as flags_core -from official.utils.logs import hooks_helper -from official.utils.logs import logger +from official.r1.utils.logs import hooks_helper +from official.r1.utils.logs import logger from official.utils.misc import distribution_utils from official.utils.misc import model_helpers -- GitLab From c187e4041de3959a5424e7ea360fb4ecc157ffdb Mon Sep 17 00:00:00 2001 From: Chen Chen Date: Thu, 18 Jun 2020 09:55:26 -0700 Subject: [PATCH 23/79] Internal Change PiperOrigin-RevId: 317123221 --- official/nlp/configs/encoders.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/official/nlp/configs/encoders.py b/official/nlp/configs/encoders.py index 2b9c7e95b..59d43295c 100644 --- a/official/nlp/configs/encoders.py +++ b/official/nlp/configs/encoders.py @@ -34,7 +34,7 @@ class TransformerEncoderConfig(base_config.Config): num_layers: int = 12 num_attention_heads: int = 12 hidden_activation: str = "gelu" - intermediate_size: int = 3076 + intermediate_size: int = 3072 dropout_rate: float = 0.1 attention_dropout_rate: float = 0.1 max_position_embeddings: int = 512 -- GitLab From 53fd242a90cd0a2440b4d61020b7d56601cc735f Mon Sep 17 00:00:00 2001 From: Jaeyoun Kim Date: Thu, 18 Jun 2020 11:02:10 -0700 Subject: [PATCH 24/79] Update README_TEMPLATE.md Add badge logos --- .github/README_TEMPLATE.md | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/.github/README_TEMPLATE.md b/.github/README_TEMPLATE.md index d04a02995..45179d0ae 100644 --- a/.github/README_TEMPLATE.md +++ b/.github/README_TEMPLATE.md @@ -1,13 +1,13 @@ > :memo: A README.md template for releasing a paper code implementation to a GitHub repository. 
> -> * Template version: 1.0.2020.125 +> * Template version: 1.0.2020.170 > * Please modify sections depending on needs. # Model name, Paper title, or Project Name > :memo: Add a badge for the ArXiv identifier of your paper (arXiv:YYMM.NNNNN) -[![Paper](http://img.shields.io/badge/paper-arXiv.YYMM.NNNNN-B3181B.svg)](https://arxiv.org/abs/...) +[![Paper](http://img.shields.io/badge/Paper-arXiv.YYMM.NNNNN-B3181B?logo=arXiv)](https://arxiv.org/abs/...) This repository is the official or unofficial implementation of the following paper. @@ -28,8 +28,8 @@ This repository is the official or unofficial implementation of the following pa > :memo: Provide maintainer information. -* Last name, First name ([@GitHub username](https://github.com/username)) -* Last name, First name ([@GitHub username](https://github.com/username)) +* Full name ([@GitHub username](https://github.com/username)) +* Full name ([@GitHub username](https://github.com/username)) ## Table of Contents @@ -37,8 +37,8 @@ This repository is the official or unofficial implementation of the following pa ## Requirements -[![TensorFlow 2.1](https://img.shields.io/badge/tensorflow-2.1-brightgreen)](https://github.com/tensorflow/tensorflow/releases/tag/v2.1.0) -[![Python 3.6](https://img.shields.io/badge/python-3.6-blue.svg)](https://www.python.org/downloads/release/python-360/) +[![TensorFlow 2.1](https://img.shields.io/badge/TensorFlow-2.1-FF6F00?logo=tensorflow)](https://github.com/tensorflow/tensorflow/releases/tag/v2.1.0) +[![Python 3.6](https://img.shields.io/badge/Python-3.6-3776AB)](https://www.python.org/downloads/release/python-360/) > :memo: Provide details of the software required. > @@ -104,6 +104,8 @@ python3 ... ## License +[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) + > :memo: Place your license text in a file named LICENSE in the root of the repository. > > * Include information about your license. 
-- GitLab From c87c3965309f675f8b3827e364849b136fdd8e51 Mon Sep 17 00:00:00 2001 From: Hongkun Yu Date: Thu, 18 Jun 2020 12:20:32 -0700 Subject: [PATCH 25/79] Update deep_speech.py (#8694) --- research/deep_speech/deep_speech.py | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/research/deep_speech/deep_speech.py b/research/deep_speech/deep_speech.py index 6af2ac0b8..3d809c3cb 100644 --- a/research/deep_speech/deep_speech.py +++ b/research/deep_speech/deep_speech.py @@ -28,8 +28,6 @@ import data.dataset as dataset import decoder import deep_speech_model from official.utils.flags import core as flags_core -from official.r1.utils.logs import hooks_helper -from official.r1.utils.logs import logger from official.utils.misc import distribution_utils from official.utils.misc import model_helpers @@ -276,16 +274,6 @@ def run_deep_speech(_): "use_bias": flags_obj.use_bias } - dataset_name = "LibriSpeech" - benchmark_logger = logger.get_benchmark_logger() - benchmark_logger.log_run_info("deep_speech", dataset_name, run_params, - test_id=flags_obj.benchmark_test_id) - - train_hooks = hooks_helper.get_train_hooks( - flags_obj.hooks, - model_dir=flags_obj.model_dir, - batch_size=flags_obj.batch_size) - per_replica_batch_size = per_device_batch_size(flags_obj.batch_size, num_gpus) def input_fn_train(): @@ -307,7 +295,7 @@ def run_deep_speech(_): train_speech_dataset.entries, cycle_index, flags_obj.sortagrad, flags_obj.batch_size) - estimator.train(input_fn=input_fn_train, hooks=train_hooks) + estimator.train(input_fn=input_fn_train) # Evaluation tf.logging.info("Starting to evaluate...") @@ -433,8 +421,7 @@ def define_deep_speech_flags(): def main(_): - with logger.benchmark_context(flags_obj): - run_deep_speech(flags_obj) + run_deep_speech(flags_obj) if __name__ == "__main__": -- GitLab From 79ae8004263faafe6de690f3725103a6498db84d Mon Sep 17 00:00:00 2001 From: pkulzc Date: Thu, 18 Jun 2020 15:10:03 -0700 Subject: [PATCH 26/79] Release Context RCNN code and pre-trained model Context R-CNN: Long Term Temporal Context for Per Camera Object Detection http://openaccess.thecvf.com/content_CVPR_2020/html/Beery_Context_R-CNN_Long_Term_Temporal_Context_for_Per-Camera_Object_Detection_CVPR_2020_paper.html --- research/object_detection/README.md | 361 ++++++++++-------- .../object_detection/g3doc/context_rcnn.md | 22 +- research/object_detection/model_lib.py | 4 +- ...n_resnet101_snapshot_serengeti_sync.config | 166 ++++++++ 4 files changed, 398 insertions(+), 155 deletions(-) create mode 100644 research/object_detection/samples/configs/context_rcnn_resnet101_snapshot_serengeti_sync.config diff --git a/research/object_detection/README.md b/research/object_detection/README.md index b6dc9ad0a..4821f3e66 100644 --- a/research/object_detection/README.md +++ b/research/object_detection/README.md @@ -2,17 +2,16 @@ ![TensorFlow 2 Not Supported](https://img.shields.io/badge/TensorFlow%202%20Not%20Supported-%E2%9C%95-red.svg) # Tensorflow Object Detection API + Creating accurate machine learning models capable of localizing and identifying multiple objects in a single image remains a core challenge in computer vision. The TensorFlow Object Detection API is an open source framework built on top of TensorFlow that makes it easy to construct, train and deploy object detection -models. At Google we’ve certainly found this codebase to be useful for our -computer vision needs, and we hope that you will as well. -


    +models. At Google we’ve certainly found this codebase to be useful for our +computer vision needs, and we hope that you will as well.


    Contributions to the codebase are welcome and we would love to hear back from -you if you find this API useful. Finally if you use the Tensorflow Object +you if you find this API useful. Finally if you use the Tensorflow Object Detection API for a research publication, please consider citing: ``` @@ -20,8 +19,8 @@ Detection API for a research publication, please consider citing: Huang J, Rathod V, Sun C, Zhu M, Korattikara A, Fathi A, Fischer I, Wojna Z, Song Y, Guadarrama S, Murphy K, CVPR 2017 ``` -\[[link](https://arxiv.org/abs/1611.10012)\]\[[bibtex]( -https://scholar.googleusercontent.com/scholar.bib?q=info:l291WsrB-hQJ:scholar.google.com/&output=citation&scisig=AAGBfm0AAAAAWUIIlnPZ_L9jxvPwcC49kDlELtaeIyU-&scisf=4&ct=citation&cd=-1&hl=en&scfhb=1)\] + +\[[link](https://arxiv.org/abs/1611.10012)\]\[[bibtex](https://scholar.googleusercontent.com/scholar.bib?q=info:l291WsrB-hQJ:scholar.google.com/&output=citation&scisig=AAGBfm0AAAAAWUIIlnPZ_L9jxvPwcC49kDlELtaeIyU-&scisf=4&ct=citation&cd=-1&hl=en&scfhb=1)\]

    @@ -29,63 +28,65 @@ https://scholar.googleusercontent.com/scholar.bib?q=info:l291WsrB-hQJ:scholar.go ## Maintainers -| Name | GitHub | -| --- | --- | -| Jonathan Huang | [jch1](https://github.com/jch1) | -| Vivek Rathod | [tombstone](https://github.com/tombstone) | -| Ronny Votel | [ronnyvotel](https://github.com/ronnyvotel) | -| Derek Chow | [derekjchow](https://github.com/derekjchow) | -| Chen Sun | [jesu9](https://github.com/jesu9) | -| Menglong Zhu | [dreamdragon](https://github.com/dreamdragon) | -| Alireza Fathi | [afathi3](https://github.com/afathi3) | -| Zhichao Lu | [pkulzc](https://github.com/pkulzc) | +Name | GitHub +-------------- | --------------------------------------------- +Jonathan Huang | [jch1](https://github.com/jch1) +Vivek Rathod | [tombstone](https://github.com/tombstone) +Ronny Votel | [ronnyvotel](https://github.com/ronnyvotel) +Derek Chow | [derekjchow](https://github.com/derekjchow) +Chen Sun | [jesu9](https://github.com/jesu9) +Menglong Zhu | [dreamdragon](https://github.com/dreamdragon) +Alireza Fathi | [afathi3](https://github.com/afathi3) +Zhichao Lu | [pkulzc](https://github.com/pkulzc) ## Table of contents Setup: - * Installation
    +* Installation
    Quick Start: - * +* Quick Start: Jupyter notebook for off-the-shelf inference
    - * Quick Start: Training a pet detector
    +* Quick Start: Training a pet detector
    Customizing a Pipeline: - * +* Configuring an object detection pipeline
    - * Preparing inputs
    +* Preparing inputs
    Running: - * Running locally
    - * Running on the cloud
    +* Running locally
    +* Running on the cloud
    Extras: - * Tensorflow detection model zoo
    - * +* Tensorflow detection model zoo
    +* Exporting a trained model for inference
    - * +* Exporting a trained model for TPU inference
    - * +* Defining your own model architecture
    - * +* Bringing in your own dataset
    - * +* Supported object detection evaluation protocols
    - * +* Inference and evaluation on the Open Images dataset
    - * +* Run an instance segmentation model
    - * +* Run the evaluation for the Open Images Challenge 2018/2019
    - * +* TPU compatible detection pipelines
    - * +* Running object detection on mobile devices with TensorFlow Lite
    +* + Context R-CNN documentation for data preparation, training, and export
    ## Getting Help @@ -98,78 +99,105 @@ tensorflow/models GitHub [issue tracker](https://github.com/tensorflow/models/issues), prefixing the issue name with "object_detection". -Please check [FAQ](g3doc/faq.md) for frequently asked questions before -reporting an issue. - +Please check [FAQ](g3doc/faq.md) for frequently asked questions before reporting +an issue. ## Release information +### June 17th, 2020 + +We have released [Context R-CNN](https://arxiv.org/abs/1912.03538), a model that +uses attention to incorporate contextual information images (e.g. from +temporally nearby frames taken by a static camera) in order to improve accuracy. +Importantly, these contextual images need not be labeled. + +* When applied to a challenging wildlife detection dataset ([Snapshot Serengeti](http://lila.science/datasets/snapshot-serengeti)), + Context R-CNN with context from up to a month of images outperforms a + single-frame baseline by 17.9% mAP, and outperforms S3D (a 3d convolution + based baseline) by 11.2% mAP. +* Context R-CNN leverages temporal context from the unlabeled frames of a + novel camera deployment to improve performance at that camera, boosting + model generalizeability. + +We have provided code for generating data with associated context +[here](g3doc/context_rcnn.md), and a sample config for a Context R-CNN +model [here](samples/configs/context_rcnn_resnet101_snapshot_serengeti_sync.config). + +Snapshot Serengeti-trained Faster R-CNN and Context R-CNN models can be found in +the [model zoo](https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/detection_model_zoo.md#snapshot-serengeti-camera-trap-trained-models). + +A colab demonstrating Context R-CNN is provided +[here](colab_tutorials/context_rcnn_tutorial.ipynb). + +Thanks to contributors: Sara Beery, Jonathan Huang, Guanhang Wu, Vivek +Rathod, Ronny Votel, Zhichao Lu, David Ross, Pietro Perona, Tanya Birch, and +the Wildlife Insights AI Team. ### May 19th, 2020 -We have released -[MobileDets](https://arxiv.org/abs/2004.14525), -a set of high-performance models for mobile CPUs, DSPs and EdgeTPUs. -* MobileDets outperform MobileNetV3+SSDLite by 1.7 mAP at comparable mobile CPU -inference latencies. MobileDets also outperform MobileNetV2+SSDLite by 1.9 mAP -on mobile CPUs, 3.7 mAP on EdgeTPUs and 3.4 mAP on DSPs while running equally -fast. MobileDets also offer up to 2x speedup over MnasFPN on EdgeTPUs and DSPs. +We have released [MobileDets](https://arxiv.org/abs/2004.14525), a set of +high-performance models for mobile CPUs, DSPs and EdgeTPUs. + +* MobileDets outperform MobileNetV3+SSDLite by 1.7 mAP at comparable mobile + CPU inference latencies. MobileDets also outperform MobileNetV2+SSDLite by + 1.9 mAP on mobile CPUs, 3.7 mAP on EdgeTPUs and 3.4 mAP on DSPs while + running equally fast. MobileDets also offer up to 2x speedup over MnasFPN on + EdgeTPUs and DSPs. For each of the three hardware platforms we have released model definition, model checkpoints trained on the COCO14 dataset and converted TFLite models in fp32 and/or uint8. -Thanks to contributors: Yunyang Xiong, Hanxiao Liu, Suyog Gupta, -Berkin Akin, Gabriel Bender, Pieter-Jan Kindermans, Mingxing Tan, Vikas Singh, -Bo Chen, Quoc Le, Zhichao Lu. - +Thanks to contributors: Yunyang Xiong, Hanxiao Liu, Suyog Gupta, Berkin +Akin, Gabriel Bender, Pieter-Jan Kindermans, Mingxing Tan, Vikas Singh, Bo Chen, +Quoc Le, Zhichao Lu. ### May 7th, 2020 + We have released a mobile model with the [MnasFPN head](https://arxiv.org/abs/1912.01106). 
+* MnasFPN with MobileNet-V2 backbone is the most accurate (26.6 mAP at 183ms + on Pixel 1) mobile detection model we have released to date. With + depth-multiplier, MnasFPN with MobileNet-V2 backbone is 1.8 mAP higher than + MobileNet-V3-Large with SSDLite (23.8 mAP vs 22.0 mAP) at similar latency + (120ms) on Pixel 1. -* MnasFPN with MobileNet-V2 backbone is the most accurate (26.6 mAP at 183ms on -Pixel 1) mobile detection model we have released to date. With depth-multiplier, -MnasFPN with MobileNet-V2 backbone is 1.8 mAP higher than MobileNet-V3-Large -with SSDLite (23.8 mAP vs 22.0 mAP) at similar latency (120ms) on Pixel 1. - -We have released model definition, model checkpoints trained on -the COCO14 dataset and a converted TFLite model. - -Thanks to contributors: Bo Chen, Golnaz Ghiasi, Hanxiao Liu, -Tsung-Yi Lin, Dmitry Kalenichenko, Hartwig Adam, Quoc Le, Zhichao Lu, -Jonathan Huang, Hao Xu. - +We have released model definition, model checkpoints trained on the COCO14 +dataset and a converted TFLite model. +Thanks to contributors: Bo Chen, Golnaz Ghiasi, Hanxiao Liu, Tsung-Yi +Lin, Dmitry Kalenichenko, Hartwig Adam, Quoc Le, Zhichao Lu, Jonathan Huang, Hao +Xu. ### Nov 13th, 2019 + We have released MobileNetEdgeTPU SSDLite model. -* SSDLite with MobileNetEdgeTPU backbone, which achieves 10% mAP higher than -MobileNetV2 SSDLite (24.3 mAP vs 22 mAP) on a Google Pixel4 at comparable -latency (6.6ms vs 6.8ms). +* SSDLite with MobileNetEdgeTPU backbone, which achieves 10% mAP higher than + MobileNetV2 SSDLite (24.3 mAP vs 22 mAP) on a Google Pixel4 at comparable + latency (6.6ms vs 6.8ms). -Along with the model definition, we are also releasing model checkpoints -trained on the COCO dataset. +Along with the model definition, we are also releasing model checkpoints trained +on the COCO dataset. Thanks to contributors: Yunyang Xiong, Bo Chen, Suyog Gupta, Hanxiao Liu, Gabriel Bender, Mingxing Tan, Berkin Akin, Zhichao Lu, Quoc Le ### Oct 15th, 2019 + We have released two MobileNet V3 SSDLite models (presented in [Searching for MobileNetV3](https://arxiv.org/abs/1905.02244)). -* SSDLite with MobileNet-V3-Large backbone, which is 27% faster than Mobilenet -V2 SSDLite (119ms vs 162ms) on a Google Pixel phone CPU at the same mAP. -* SSDLite with MobileNet-V3-Small backbone, which is 37% faster than MnasNet -SSDLite reduced with depth-multiplier (43ms vs 68ms) at the same mAP. +* SSDLite with MobileNet-V3-Large backbone, which is 27% faster than Mobilenet + V2 SSDLite (119ms vs 162ms) on a Google Pixel phone CPU at the same mAP. +* SSDLite with MobileNet-V3-Small backbone, which is 37% faster than MnasNet + SSDLite reduced with depth-multiplier (43ms vs 68ms) at the same mAP. -Along with the model definition, we are also releasing model checkpoints -trained on the COCO dataset. +Along with the model definition, we are also releasing model checkpoints trained +on the COCO dataset. Thanks to contributors: Bo Chen, Zhichao Lu, Vivek Rathod, Jonathan Huang - ### July 1st, 2019 We have released an updated set of utils and an updated @@ -177,28 +205,30 @@ We have released an updated set of utils and an updated [Open Images Challenge 2019](https://storage.googleapis.com/openimages/web/challenge2019.html)! The Instance Segmentation metric for -[Open Images V5](https://storage.googleapis.com/openimages/web/index.html) -and [Challenge 2019](https://storage.googleapis.com/openimages/web/challenge2019.html) -is part of this release. 
Check out [the metric description](https://storage.googleapis.com/openimages/web/evaluation.html#instance_segmentation_eval) +[Open Images V5](https://storage.googleapis.com/openimages/web/index.html) and +[Challenge 2019](https://storage.googleapis.com/openimages/web/challenge2019.html) +is part of this release. Check out +[the metric description](https://storage.googleapis.com/openimages/web/evaluation.html#instance_segmentation_eval) on the Open Images website. Thanks to contributors: Alina Kuznetsova, Rodrigo Benenson ### Feb 11, 2019 -We have released detection models trained on the Open Images Dataset V4 -in our detection model zoo, including +We have released detection models trained on the Open Images Dataset V4 in our +detection model zoo, including -* Faster R-CNN detector with Inception Resnet V2 feature extractor -* SSD detector with MobileNet V2 feature extractor -* SSD detector with ResNet 101 FPN feature extractor (aka RetinaNet-101) +* Faster R-CNN detector with Inception Resnet V2 feature extractor +* SSD detector with MobileNet V2 feature extractor +* SSD detector with ResNet 101 FPN feature extractor (aka RetinaNet-101) Thanks to contributors: Alina Kuznetsova, Yinxiao Li ### Sep 17, 2018 We have released Faster R-CNN detectors with ResNet-50 / ResNet-101 feature -extractors trained on the [iNaturalist Species Detection Dataset](https://github.com/visipedia/inat_comp/blob/master/2017/README.md#bounding-boxes). +extractors trained on the +[iNaturalist Species Detection Dataset](https://github.com/visipedia/inat_comp/blob/master/2017/README.md#bounding-boxes). The models are trained on the training split of the iNaturalist data for 4M iterations, they achieve 55% and 58% mean AP@.5 over 2854 classes respectively. For more details please refer to this [paper](https://arxiv.org/abs/1707.06642). @@ -210,42 +240,59 @@ For more details please refer to this [paper](https://arxiv.org/abs/1707.06642). There are many new updates in this release, extending the functionality and capability of the API: -* Moving from slim-based training to [Estimator](https://www.tensorflow.org/api_docs/python/tf/estimator/Estimator)-based -training. -* Support for [RetinaNet](https://arxiv.org/abs/1708.02002), and a [MobileNet](https://ai.googleblog.com/2017/06/mobilenets-open-source-models-for.html) -adaptation of RetinaNet. -* A novel SSD-based architecture called the [Pooling Pyramid Network](https://arxiv.org/abs/1807.03284) (PPN). -* Releasing several [TPU](https://cloud.google.com/tpu/)-compatible models. -These can be found in the `samples/configs/` directory with a comment in the -pipeline configuration files indicating TPU compatibility. -* Support for quantized training. -* Updated documentation for new binaries, Cloud training, and [Tensorflow Lite](https://www.tensorflow.org/mobile/tflite/). - -See also our [expanded announcement blogpost](https://ai.googleblog.com/2018/07/accelerated-training-and-inference-with.html) and accompanying tutorial at the [TensorFlow blog](https://medium.com/tensorflow/training-and-serving-a-realtime-mobile-object-detector-in-30-minutes-with-cloud-tpus-b78971cf1193). +* Moving from slim-based training to + [Estimator](https://www.tensorflow.org/api_docs/python/tf/estimator/Estimator)-based + training. +* Support for [RetinaNet](https://arxiv.org/abs/1708.02002), and a + [MobileNet](https://ai.googleblog.com/2017/06/mobilenets-open-source-models-for.html) + adaptation of RetinaNet. 
+* A novel SSD-based architecture called the + [Pooling Pyramid Network](https://arxiv.org/abs/1807.03284) (PPN). +* Releasing several [TPU](https://cloud.google.com/tpu/)-compatible models. + These can be found in the `samples/configs/` directory with a comment in the + pipeline configuration files indicating TPU compatibility. +* Support for quantized training. +* Updated documentation for new binaries, Cloud training, and + [Tensorflow Lite](https://www.tensorflow.org/mobile/tflite/). + +See also our +[expanded announcement blogpost](https://ai.googleblog.com/2018/07/accelerated-training-and-inference-with.html) +and accompanying tutorial at the +[TensorFlow blog](https://medium.com/tensorflow/training-and-serving-a-realtime-mobile-object-detector-in-30-minutes-with-cloud-tpus-b78971cf1193). Thanks to contributors: Sara Robinson, Aakanksha Chowdhery, Derek Chow, Pengchong Jin, Jonathan Huang, Vivek Rathod, Zhichao Lu, Ronny Votel - ### June 25, 2018 -Additional evaluation tools for the [Open Images Challenge 2018](https://storage.googleapis.com/openimages/web/challenge.html) are out. -Check out our short tutorial on data preparation and running evaluation [here](g3doc/challenge_evaluation.md)! +Additional evaluation tools for the +[Open Images Challenge 2018](https://storage.googleapis.com/openimages/web/challenge.html) +are out. Check out our short tutorial on data preparation and running evaluation +[here](g3doc/challenge_evaluation.md)! Thanks to contributors: Alina Kuznetsova ### June 5, 2018 -We have released the implementation of evaluation metrics for both tracks of the [Open Images Challenge 2018](https://storage.googleapis.com/openimages/web/challenge.html) as a part of the Object Detection API - see the [evaluation protocols](g3doc/evaluation_protocols.md) for more details. -Additionally, we have released a tool for hierarchical labels expansion for the Open Images Challenge: check out [oid_hierarchical_labels_expansion.py](dataset_tools/oid_hierarchical_labels_expansion.py). +We have released the implementation of evaluation metrics for both tracks of the +[Open Images Challenge 2018](https://storage.googleapis.com/openimages/web/challenge.html) +as a part of the Object Detection API - see the +[evaluation protocols](g3doc/evaluation_protocols.md) for more details. +Additionally, we have released a tool for hierarchical labels expansion for the +Open Images Challenge: check out +[oid_hierarchical_labels_expansion.py](dataset_tools/oid_hierarchical_labels_expansion.py). -Thanks to contributors: Alina Kuznetsova, Vittorio Ferrari, Jasper Uijlings +Thanks to contributors: Alina Kuznetsova, Vittorio Ferrari, Jasper +Uijlings ### April 30, 2018 -We have released a Faster R-CNN detector with ResNet-101 feature extractor trained on [AVA](https://research.google.com/ava/) v2.1. -Compared with other commonly used object detectors, it changes the action classification loss function to per-class Sigmoid loss to handle boxes with multiple labels. -The model is trained on the training split of AVA v2.1 for 1.5M iterations, it achieves mean AP of 11.25% over 60 classes on the validation split of AVA v2.1. +We have released a Faster R-CNN detector with ResNet-101 feature extractor +trained on [AVA](https://research.google.com/ava/) v2.1. Compared with other +commonly used object detectors, it changes the action classification loss +function to per-class Sigmoid loss to handle boxes with multiple labels. 
The +model is trained on the training split of AVA v2.1 for 1.5M iterations, it +achieves mean AP of 11.25% over 60 classes on the validation split of AVA v2.1. For more details please refer to this [paper](https://arxiv.org/abs/1705.08421). Thanks to contributors: Chen Sun, David Ross @@ -255,84 +302,94 @@ For more details please refer to this [paper](https://arxiv.org/abs/1705.08421). Supercharge your mobile phones with the next generation mobile object detector! We are adding support for MobileNet V2 with SSDLite presented in [MobileNetV2: Inverted Residuals and Linear Bottlenecks](https://arxiv.org/abs/1801.04381). -This model is 35% faster than Mobilenet V1 SSD on a Google Pixel phone CPU (200ms vs. 270ms) at the same accuracy. -Along with the model definition, we are also releasing a model checkpoint trained on the COCO dataset. +This model is 35% faster than Mobilenet V1 SSD on a Google Pixel phone CPU +(200ms vs. 270ms) at the same accuracy. Along with the model definition, we are +also releasing a model checkpoint trained on the COCO dataset. -Thanks to contributors: Menglong Zhu, Mark Sandler, Zhichao Lu, Vivek Rathod, Jonathan Huang +Thanks to contributors: Menglong Zhu, Mark Sandler, Zhichao Lu, Vivek +Rathod, Jonathan Huang ### February 9, 2018 -We now support instance segmentation!! In this API update we support a number of instance segmentation models similar to those discussed in the [Mask R-CNN paper](https://arxiv.org/abs/1703.06870). For further details refer to -[our slides](http://presentations.cocodataset.org/Places17-GMRI.pdf) from the 2017 Coco + Places Workshop. -Refer to the section on [Running an Instance Segmentation Model](g3doc/instance_segmentation.md) for instructions on how to configure a model -that predicts masks in addition to object bounding boxes. +We now support instance segmentation!! In this API update we support a number of +instance segmentation models similar to those discussed in the +[Mask R-CNN paper](https://arxiv.org/abs/1703.06870). For further details refer +to [our slides](http://presentations.cocodataset.org/Places17-GMRI.pdf) from the +2017 Coco + Places Workshop. Refer to the section on +[Running an Instance Segmentation Model](g3doc/instance_segmentation.md) for +instructions on how to configure a model that predicts masks in addition to +object bounding boxes. -Thanks to contributors: Alireza Fathi, Zhichao Lu, Vivek Rathod, Ronny Votel, Jonathan Huang +Thanks to contributors: Alireza Fathi, Zhichao Lu, Vivek Rathod, Ronny +Votel, Jonathan Huang ### November 17, 2017 As a part of the Open Images V3 release we have released: -* An implementation of the Open Images evaluation metric and the [protocol](g3doc/evaluation_protocols.md#open-images). -* Additional tools to separate inference of detection and evaluation (see [this tutorial](g3doc/oid_inference_and_evaluation.md)). -* A new detection model trained on the Open Images V2 data release (see [Open Images model](g3doc/detection_model_zoo.md#open-images-models)). +* An implementation of the Open Images evaluation metric and the + [protocol](g3doc/evaluation_protocols.md#open-images). +* Additional tools to separate inference of detection and evaluation (see + [this tutorial](g3doc/oid_inference_and_evaluation.md)). +* A new detection model trained on the Open Images V2 data release (see + [Open Images model](g3doc/detection_model_zoo.md#open-images-models)). -See more information on the [Open Images website](https://github.com/openimages/dataset)! 
+See more information on the +[Open Images website](https://github.com/openimages/dataset)! Thanks to contributors: Stefan Popov, Alina Kuznetsova ### November 6, 2017 We have re-released faster versions of our (pre-trained) models in the -model zoo. In addition to what -was available before, we are also adding Faster R-CNN models trained on COCO -with Inception V2 and Resnet-50 feature extractors, as well as a Faster R-CNN -with Resnet-101 model trained on the KITTI dataset. +model zoo. In addition to what was +available before, we are also adding Faster R-CNN models trained on COCO with +Inception V2 and Resnet-50 feature extractors, as well as a Faster R-CNN with +Resnet-101 model trained on the KITTI dataset. -Thanks to contributors: Jonathan Huang, Vivek Rathod, Derek Chow, -Tal Remez, Chen Sun. +Thanks to contributors: Jonathan Huang, Vivek Rathod, Derek Chow, Tal +Remez, Chen Sun. ### October 31, 2017 -We have released a new state-of-the-art model for object detection using -the Faster-RCNN with the -[NASNet-A image featurization](https://arxiv.org/abs/1707.07012). This -model achieves mAP of 43.1% on the test-dev validation dataset for COCO, -improving on the best available model in the zoo by 6% in terms -of absolute mAP. +We have released a new state-of-the-art model for object detection using the +Faster-RCNN with the +[NASNet-A image featurization](https://arxiv.org/abs/1707.07012). This model +achieves mAP of 43.1% on the test-dev validation dataset for COCO, improving on +the best available model in the zoo by 6% in terms of absolute mAP. -Thanks to contributors: Barret Zoph, Vijay Vasudevan, Jonathon Shlens, Quoc Le +Thanks to contributors: Barret Zoph, Vijay Vasudevan, Jonathon Shlens, +Quoc Le ### August 11, 2017 -We have released an update to the [Android Detect -demo](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/examples/android) -which will now run models trained using the Tensorflow Object -Detection API on an Android device. By default, it currently runs a -frozen SSD w/Mobilenet detector trained on COCO, but we encourage -you to try out other detection models! +We have released an update to the +[Android Detect demo](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/examples/android) +which will now run models trained using the Tensorflow Object Detection API on +an Android device. By default, it currently runs a frozen SSD w/Mobilenet +detector trained on COCO, but we encourage you to try out other detection +models! Thanks to contributors: Jonathan Huang, Andrew Harp - ### June 15, 2017 -In addition to our base Tensorflow detection model definitions, this -release includes: - -* A selection of trainable detection models, including: - * Single Shot Multibox Detector (SSD) with MobileNet, - * SSD with Inception V2, - * Region-Based Fully Convolutional Networks (R-FCN) with Resnet 101, - * Faster RCNN with Resnet 101, - * Faster RCNN with Inception Resnet v2 -* Frozen weights (trained on the COCO dataset) for each of the above models to - be used for out-of-the-box inference purposes. -* A [Jupyter notebook](object_detection_tutorial.ipynb) for performing - out-of-the-box inference with one of our released models -* Convenient [local training](g3doc/running_locally.md) scripts as well as - distributed training and evaluation pipelines via - [Google Cloud](g3doc/running_on_cloud.md). 
+In addition to our base Tensorflow detection model definitions, this release +includes: + +* A selection of trainable detection models, including: + * Single Shot Multibox Detector (SSD) with MobileNet, + * SSD with Inception V2, + * Region-Based Fully Convolutional Networks (R-FCN) with Resnet 101, + * Faster RCNN with Resnet 101, + * Faster RCNN with Inception Resnet v2 +* Frozen weights (trained on the COCO dataset) for each of the above models to + be used for out-of-the-box inference purposes. +* A [Jupyter notebook](colab_tutorials/object_detection_tutorial.ipynb) for + performing out-of-the-box inference with one of our released models +* Convenient [local training](g3doc/running_locally.md) scripts as well as + distributed training and evaluation pipelines via + [Google Cloud](g3doc/running_on_cloud.md). Thanks to contributors: Jonathan Huang, Vivek Rathod, Derek Chow, Chen Sun, Menglong Zhu, Matthew Tang, Anoop Korattikara, Alireza Fathi, Ian Fischer, diff --git a/research/object_detection/g3doc/context_rcnn.md b/research/object_detection/g3doc/context_rcnn.md index d322b3fd8..a51e4f041 100644 --- a/research/object_detection/g3doc/context_rcnn.md +++ b/research/object_detection/g3doc/context_rcnn.md @@ -22,6 +22,18 @@ contextual features. We focus on building context from object-centric features generated with a pre-trained Faster R-CNN model, but you can adapt the provided code to use alternative feature extractors. +Each of these data processing scripts uses Apache Beam, which can be installed +using + +``` +pip install apache-beam +``` + +and can be run locally, or on a cluster for efficient processing of large +amounts of data. See the +[Apache Beam documentation](https://beam.apache.org/documentation/runners/dataflow/) +for more information. + ### Generating TfRecords from a set of images and a COCO-CameraTraps style JSON If your data is already stored in TfRecords, you can skip this first step. @@ -99,6 +111,10 @@ python object_detection/export_inference_graph.py \ --additional_output_tensor_names detection_features ``` +Make sure that you have set `output_final_box_features: true` within +your config file before exporting. This is needed to export the features as an +output, but it does not need to be set during training. + To generate and save contextual features for your data, run ``` @@ -111,7 +127,8 @@ python object_detection/dataset_tools/context_rcnn/generate_embedding_data.py \ ### Building up contextual memory banks and storing them for each context group -To build the context features into memory banks, run +To build the context features you just added for each image into memory banks, +run ``` python object_detection/dataset_tools/context_rcnn/add_context_to_examples.py \ @@ -121,6 +138,9 @@ python object_detection/dataset_tools/context_rcnn/add_context_to_examples.py \ --time_horizon month ``` +where the input_tfrecords for add_context_to_examples.py are the +output_tfrecords from generate_embedding_data.py. + For all options, see add_context_to_examples.py. By default, this code builds TfSequenceExamples, which are more data efficient (this allows you to store the context features once for each context group, as opposed to once per image). 
If diff --git a/research/object_detection/model_lib.py b/research/object_detection/model_lib.py index 579125151..365ea1c02 100644 --- a/research/object_detection/model_lib.py +++ b/research/object_detection/model_lib.py @@ -23,9 +23,9 @@ import functools import os import tensorflow.compat.v1 as tf +import tensorflow.compat.v2 as tf2 import tf_slim as slim - from object_detection import eval_util from object_detection import exporter as exporter_lib from object_detection import inputs @@ -349,7 +349,7 @@ def create_model_fn(detection_model_fn, configs, hparams, use_tpu=False, from tensorflow.python.keras.engine import base_layer_utils # pylint: disable=g-import-not-at-top # Enable v2 behavior, as `mixed_bfloat16` is only supported in TF 2.0. base_layer_utils.enable_v2_dtype_behavior() - tf.compat.v2.keras.mixed_precision.experimental.set_policy( + tf2.keras.mixed_precision.experimental.set_policy( 'mixed_bfloat16') detection_model = detection_model_fn( is_training=is_training, add_summaries=(not use_tpu)) diff --git a/research/object_detection/samples/configs/context_rcnn_resnet101_snapshot_serengeti_sync.config b/research/object_detection/samples/configs/context_rcnn_resnet101_snapshot_serengeti_sync.config new file mode 100644 index 000000000..b96dea467 --- /dev/null +++ b/research/object_detection/samples/configs/context_rcnn_resnet101_snapshot_serengeti_sync.config @@ -0,0 +1,166 @@ +# Context R-CNN configuration for Snapshot Serengeti Dataset, with sequence +# example input data with context_features. +# This model uses attention into contextual features within the Faster R-CNN +# object detection framework to improve object detection performance. +# See https://arxiv.org/abs/1912.03538 for more information. +# Search for "PATH_TO_BE_CONFIGURED" to find the fields that should be +# configured. + +# This config is TPU compatible. 
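# Example launch (a sketch, not part of this config): once the
# PATH_TO_BE_CONFIGURED fields below are filled in, training can typically be
# started with the Object Detection API training binary, e.g.
#
#   python object_detection/model_main.py \
#     --pipeline_config_path=path/to/context_rcnn_resnet101_snapshot_serengeti_sync.config \
#     --model_dir=path/to/train_dir \
#     --alsologtostderr
#
# The binary name and flags above are assumptions based on the standard
# Object Detection API workflow (see g3doc/running_locally.md); adjust them to
# your environment.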
+ +model { + faster_rcnn { + num_classes: 48 + image_resizer { + fixed_shape_resizer { + height: 640 + width: 640 + } + } + feature_extractor { + type: "faster_rcnn_resnet101" + first_stage_features_stride: 16 + batch_norm_trainable: true + } + first_stage_anchor_generator { + grid_anchor_generator { + height_stride: 16 + width_stride: 16 + scales: 0.25 + scales: 0.5 + scales: 1.0 + scales: 2.0 + aspect_ratios: 0.5 + aspect_ratios: 1.0 + aspect_ratios: 2.0 + } + } + first_stage_box_predictor_conv_hyperparams { + op: CONV + regularizer { + l2_regularizer { + weight: 0.0 + } + } + initializer { + truncated_normal_initializer { + stddev: 0.00999999977648 + } + } + } + first_stage_nms_score_threshold: 0.0 + first_stage_nms_iou_threshold: 0.699999988079 + first_stage_max_proposals: 300 + first_stage_localization_loss_weight: 2.0 + first_stage_objectness_loss_weight: 1.0 + initial_crop_size: 14 + maxpool_kernel_size: 2 + maxpool_stride: 2 + second_stage_box_predictor { + mask_rcnn_box_predictor { + fc_hyperparams { + op: FC + regularizer { + l2_regularizer { + weight: 0.0 + } + } + initializer { + variance_scaling_initializer { + factor: 1.0 + uniform: true + mode: FAN_AVG + } + } + } + use_dropout: false + dropout_keep_probability: 1.0 + share_box_across_classes: true + } + } + second_stage_post_processing { + batch_non_max_suppression { + score_threshold: 0.0 + iou_threshold: 0.600000023842 + max_detections_per_class: 100 + max_total_detections: 300 + } + score_converter: SOFTMAX + } + second_stage_localization_loss_weight: 2.0 + second_stage_classification_loss_weight: 1.0 + use_matmul_crop_and_resize: true + clip_anchors_to_image: true + use_matmul_gather_in_matcher: true + use_static_balanced_label_sampler: true + use_static_shapes: true + context_config { + max_num_context_features: 2000 + context_feature_length: 2057 + } + } +} +train_config { + batch_size: 64 + data_augmentation_options { + random_horizontal_flip { + } + } + sync_replicas: true + optimizer { + momentum_optimizer { + learning_rate { + manual_step_learning_rate { + initial_learning_rate: 0.0 + schedule { + step: 2000 + learning_rate: 0.00200000009499 + } + schedule { + step: 200000 + learning_rate: 0.000199999994948 + } + schedule { + step: 300000 + learning_rate: 1.99999994948e-05 + } + warmup: true + } + } + momentum_optimizer_value: 0.899999976158 + } + use_moving_average: false + } + gradient_clipping_by_norm: 10.0 + fine_tune_checkpoint: "PATH_TO_BE_CONFIGURED/faster_rcnn_resnet101_coco_2018_08_14/model.ckpt" + from_detection_checkpoint: true + num_steps: 500000 + replicas_to_aggregate: 8 + max_number_of_boxes: 100 + unpad_groundtruth_tensors: false + use_bfloat16: true +} +train_input_reader { + label_map_path: "PATH_TO_BE_CONFIGURED/ss_label_map.pbtxt" + tf_record_input_reader { + input_path: "PATH_TO_BE_CONFIGURED/snapshot_serengeti_train-?????-of-?????" + } + load_context_features: true + input_type: TF_SEQUENCE_EXAMPLE +} +eval_config { + max_evals: 50 + metrics_set: "coco_detection_metrics" + use_moving_averages: false + batch_size: 4 +} +eval_input_reader { + label_map_path: "PATH_TO_BE_CONFIGURED/ss_label_map.pbtxt" + shuffle: false + num_epochs: 1 + tf_record_input_reader { + input_path: "PATH_TO_BE_CONFIGURED/snapshot_serengeti_val-?????-of-?????" 
+ } + load_context_features: true + input_type: TF_SEQUENCE_EXAMPLE +} -- GitLab From 7647fbcfb29632248f3f3269c95771c3915ced32 Mon Sep 17 00:00:00 2001 From: Tianqi Liu Date: Thu, 18 Jun 2020 17:06:18 -0700 Subject: [PATCH 27/79] Internal change PiperOrigin-RevId: 317211727 --- official/nlp/data/classifier_data_lib.py | 188 ++++++++++++++++---- official/nlp/data/create_finetuning_data.py | 14 +- 2 files changed, 161 insertions(+), 41 deletions(-) diff --git a/official/nlp/data/classifier_data_lib.py b/official/nlp/data/classifier_data_lib.py index ce17edc1f..a3954d1b6 100644 --- a/official/nlp/data/classifier_data_lib.py +++ b/official/nlp/data/classifier_data_lib.py @@ -191,12 +191,68 @@ class XnliProcessor(DataProcessor): return "XNLI" -class PawsxProcessor(DataProcessor): - """Processor for the PAWS-X data set.""" +class XtremeXnliProcessor(DataProcessor): + """Processor for the XTREME XNLI data set.""" supported_languages = [ - "de", "en", "es", "fr", "ja", "ko", "zh" + "ar", "bg", "de", "el", "en", "es", "fr", "hi", "ru", "sw", "th", "tr", + "ur", "vi", "zh" ] + def get_train_examples(self, data_dir): + """See base class.""" + lines = self._read_tsv(os.path.join(data_dir, "train-en.tsv")) + + examples = [] + for (i, line) in enumerate(lines): + guid = "train-%d" % i + text_a = self.process_text_fn(line[0]) + text_b = self.process_text_fn(line[1]) + label = self.process_text_fn(line[2]) + examples.append( + InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) + return examples + + def get_dev_examples(self, data_dir): + """See base class.""" + lines = self._read_tsv(os.path.join(data_dir, "dev-en.tsv")) + examples = [] + for (i, line) in enumerate(lines): + guid = "dev-%d" % i + text_a = self.process_text_fn(line[0]) + text_b = self.process_text_fn(line[1]) + label = self.process_text_fn(line[2]) + examples.append( + InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) + return examples + + def get_test_examples(self, data_dir): + """See base class.""" + examples_by_lang = {k: [] for k in self.supported_languages} + for lang in self.supported_languages: + lines = self._read_tsv(os.path.join(data_dir, f"test-{lang}.tsv")) + for (i, line) in enumerate(lines): + guid = f"test-{i}" + text_a = self.process_text_fn(line[0]) + text_b = self.process_text_fn(line[1]) + label = self.process_text_fn(line[2]) + examples_by_lang[lang].append( + InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) + return examples_by_lang + + def get_labels(self): + """See base class.""" + return ["contradiction", "entailment", "neutral"] + + @staticmethod + def get_processor_name(): + """See base class.""" + return "XTREME-XNLI" + + +class PawsxProcessor(DataProcessor): + """Processor for the PAWS-X data set.""" + supported_languages = ["de", "en", "es", "fr", "ja", "ko", "zh"] + def __init__(self, language="en", process_text_fn=tokenization.convert_to_unicode): @@ -219,8 +275,7 @@ class PawsxProcessor(DataProcessor): train_tsv = "translated_train.tsv" # Skips the header. lines.extend( - self._read_tsv( - os.path.join(data_dir, language, train_tsv))[1:]) + self._read_tsv(os.path.join(data_dir, language, train_tsv))[1:]) examples = [] for (i, line) in enumerate(lines): @@ -235,34 +290,30 @@ class PawsxProcessor(DataProcessor): def get_dev_examples(self, data_dir): """See base class.""" lines = [] - for language in PawsxProcessor.supported_languages: - # Skips the header. 
- lines.extend( - self._read_tsv(os.path.join(data_dir, language, "dev_2k.tsv"))[1:]) + for lang in PawsxProcessor.supported_languages: + lines.extend(self._read_tsv(os.path.join(data_dir, f"dev-{lang}.tsv"))) examples = [] for (i, line) in enumerate(lines): guid = "dev-%d" % i - text_a = self.process_text_fn(line[1]) - text_b = self.process_text_fn(line[2]) - label = self.process_text_fn(line[3]) + text_a = self.process_text_fn(line[0]) + text_b = self.process_text_fn(line[1]) + label = self.process_text_fn(line[2]) examples.append( InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) return examples def get_test_examples(self, data_dir): """See base class.""" - examples_by_lang = {k: [] for k in PawsxProcessor.supported_languages} - for language in PawsxProcessor.supported_languages: - lines = self._read_tsv(os.path.join(data_dir, language, "test_2k.tsv")) + examples_by_lang = {k: [] for k in self.supported_languages} + for lang in self.supported_languages: + lines = self._read_tsv(os.path.join(data_dir, f"test-{lang}.tsv")) for (i, line) in enumerate(lines): - if i == 0: - continue guid = "test-%d" % i - text_a = self.process_text_fn(line[1]) - text_b = self.process_text_fn(line[2]) - label = self.process_text_fn(line[3]) - examples_by_lang[language].append( + text_a = self.process_text_fn(line[0]) + text_b = self.process_text_fn(line[1]) + label = self.process_text_fn(line[2]) + examples_by_lang[lang].append( InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) return examples_by_lang @@ -273,7 +324,62 @@ class PawsxProcessor(DataProcessor): @staticmethod def get_processor_name(): """See base class.""" - return "PAWS-X" + return "XTREME-PAWS-X" + + +class XtremePawsxProcessor(DataProcessor): + """Processor for the XTREME PAWS-X data set.""" + supported_languages = ["de", "en", "es", "fr", "ja", "ko", "zh"] + + def get_train_examples(self, data_dir): + """See base class.""" + lines = self._read_tsv(os.path.join(data_dir, "train-en.tsv")) + examples = [] + for (i, line) in enumerate(lines): + guid = "train-%d" % i + text_a = self.process_text_fn(line[0]) + text_b = self.process_text_fn(line[1]) + label = self.process_text_fn(line[2]) + examples.append( + InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) + return examples + + def get_dev_examples(self, data_dir): + """See base class.""" + lines = self._read_tsv(os.path.join(data_dir, "dev_en.tsv")) + + examples = [] + for (i, line) in enumerate(lines): + guid = "dev-%d" % i + text_a = self.process_text_fn(line[0]) + text_b = self.process_text_fn(line[1]) + label = self.process_text_fn(line[2]) + examples.append( + InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) + return examples + + def get_test_examples(self, data_dir): + """See base class.""" + examples_by_lang = {k: [] for k in self.supported_languages} + for lang in self.supported_languages: + lines = self._read_tsv(os.path.join(data_dir, f"test-{lang}.tsv")) + for (i, line) in enumerate(lines): + guid = "test-%d" % i + text_a = self.process_text_fn(line[0]) + text_b = self.process_text_fn(line[1]) + label = self.process_text_fn(line[2]) + examples_by_lang[lang].append( + InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) + return examples_by_lang + + def get_labels(self): + """See base class.""" + return ["0", "1"] + + @staticmethod + def get_processor_name(): + """See base class.""" + return "XTREME-PAWS-X" class MnliProcessor(DataProcessor): @@ -407,8 +513,8 @@ class QqpProcessor(DataProcessor): 
label = line[5] except IndexError: continue - examples.append(InputExample(guid=guid, text_a=text_a, text_b=text_b, - label=label)) + examples.append( + InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) return examples @@ -583,15 +689,16 @@ class TfdsProcessor(DataProcessor): is_regression: Whether the task is a regression problem (defaults to False). """ - def __init__(self, tfds_params, + def __init__(self, + tfds_params, process_text_fn=tokenization.convert_to_unicode): super(TfdsProcessor, self).__init__(process_text_fn) self._process_tfds_params_str(tfds_params) if self.module_import: importlib.import_module(self.module_import) - self.dataset, info = tfds.load(self.dataset_name, data_dir=self.data_dir, - with_info=True) + self.dataset, info = tfds.load( + self.dataset_name, data_dir=self.data_dir, with_info=True) if self.is_regression: self._labels = None else: @@ -660,8 +767,12 @@ class TfdsProcessor(DataProcessor): if self.weight_key: weight = float(example[self.weight_key]) examples.append( - InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label, - weight=weight)) + InputExample( + guid=guid, + text_a=text_a, + text_b=text_b, + label=label, + weight=weight)) return examples @@ -761,9 +872,12 @@ def convert_single_example(ex_index, example, label_list, max_seq_length, return feature -def file_based_convert_examples_to_features(examples, label_list, - max_seq_length, tokenizer, - output_file, label_type=None): +def file_based_convert_examples_to_features(examples, + label_list, + max_seq_length, + tokenizer, + output_file, + label_type=None): """Convert a set of `InputExample`s to a TFRecord file.""" tf.io.gfile.makedirs(os.path.dirname(output_file)) @@ -779,6 +893,7 @@ def file_based_convert_examples_to_features(examples, label_list, def create_int_feature(values): f = tf.train.Feature(int64_list=tf.train.Int64List(value=list(values))) return f + def create_float_feature(values): f = tf.train.Feature(float_list=tf.train.FloatList(value=list(values))) return f @@ -857,8 +972,7 @@ def generate_tf_record_from_data_file(processor, train_input_data_examples = processor.get_train_examples(data_dir) file_based_convert_examples_to_features(train_input_data_examples, label_list, max_seq_length, tokenizer, - train_data_output_path, - label_type) + train_data_output_path, label_type) num_training_data = len(train_input_data_examples) if eval_data_output_path: @@ -873,10 +987,8 @@ def generate_tf_record_from_data_file(processor, if isinstance(test_input_data_examples, dict): for language, examples in test_input_data_examples.items(): file_based_convert_examples_to_features( - examples, - label_list, max_seq_length, - tokenizer, test_data_output_path.format(language), - label_type) + examples, label_list, max_seq_length, tokenizer, + test_data_output_path.format(language), label_type) else: file_based_convert_examples_to_features(test_input_data_examples, label_list, max_seq_length, diff --git a/official/nlp/data/create_finetuning_data.py b/official/nlp/data/create_finetuning_data.py index 256c1dee0..54a07732b 100644 --- a/official/nlp/data/create_finetuning_data.py +++ b/official/nlp/data/create_finetuning_data.py @@ -48,8 +48,12 @@ flags.DEFINE_string( flags.DEFINE_enum("classification_task_name", "MNLI", ["COLA", "MNLI", "MRPC", "QNLI", "QQP", "SST-2", "XNLI", - "PAWS-X"], - "The name of the task to train BERT classifier.") + "PAWS-X", "XTREME-XNLI", "XTREME-PAWS-X"], + "The name of the task to train BERT classifier. 
The " + "difference between XTREME-XNLI and XNLI is: 1. the format " + "of input tsv files; 2. the dev set for XTREME is english " + "only and for XNLI is all languages combined. Same for " + "PAWS-X.") # XNLI task specific flag. flags.DEFINE_string( @@ -176,7 +180,11 @@ def generate_classifier_dataset(): language=FLAGS.xnli_language), "paws-x": functools.partial(classifier_data_lib.PawsxProcessor, - language=FLAGS.pawsx_language) + language=FLAGS.pawsx_language), + "xtreme-xnli": + functools.partial(classifier_data_lib.XtremeXnliProcessor), + "xtreme-paws-x": + functools.partial(classifier_data_lib.XtremePawsxProcessor) } task_name = FLAGS.classification_task_name.lower() if task_name not in processors: -- GitLab From 8284ea208468cec3f97088bd5bdfc8376356eb34 Mon Sep 17 00:00:00 2001 From: Jaeyoun Kim Date: Thu, 18 Jun 2020 17:14:09 -0700 Subject: [PATCH 28/79] Update README.md Add Context R-CNN --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 3ff84454a..d4eab2a22 100644 --- a/README.md +++ b/README.md @@ -14,6 +14,7 @@ The TensorFlow Model Garden is a repository with a number of different implement | Date | News | |------|------| +| June 17, 2020 | [Context R-CNN: Long Term Temporal Context for Per-Camera Object Detection](https://github.com/tensorflow/models/tree/master/research/object_detection#june-17th-2020) released | May 21, 2020 | [Unifying Deep Local and Global Features for Image Search (DELG)](https://github.com/tensorflow/models/tree/master/research/delf#delg) code released | May 19, 2020 | [MobileDets: Searching for Object Detection Architectures for Mobile Accelerators](https://github.com/tensorflow/models/tree/master/research/object_detection#may-19th-2020) released | May 7, 2020 | [MnasFPN with MobileNet-V2 backbone](https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/detection_model_zoo.md#mobile-models) released for object detection -- GitLab From c8f9cf196aa7a9c9554e6005782fd3d71f8d80e4 Mon Sep 17 00:00:00 2001 From: Chen Chen Date: Thu, 18 Jun 2020 22:55:27 -0700 Subject: [PATCH 29/79] Support multiple prediction files for SQuAD task. PiperOrigin-RevId: 317253522 --- official/nlp/bert/run_squad_helper.py | 113 ++++++++++++++++++-------- 1 file changed, 80 insertions(+), 33 deletions(-) diff --git a/official/nlp/bert/run_squad_helper.py b/official/nlp/bert/run_squad_helper.py index 7f6ea5bbb..b03e356d9 100644 --- a/official/nlp/bert/run_squad_helper.py +++ b/official/nlp/bert/run_squad_helper.py @@ -61,7 +61,11 @@ def define_common_squad_flags(): flags.DEFINE_integer('train_batch_size', 32, 'Total batch size for training.') # Predict processing related. flags.DEFINE_string('predict_file', None, - 'Prediction data path with train tfrecords.') + 'SQuAD prediction json file path. ' + '`predict` mode supports multiple files: one can use ' + 'wildcard to specify multiple files and it can also be ' + 'multiple file patterns separated by comma. Note that ' + '`eval` mode only supports a single predict file.') flags.DEFINE_bool( 'do_lower_case', True, 'Whether to lower case the input text. 
Should be True for uncased ' @@ -159,22 +163,9 @@ def get_dataset_fn(input_file_pattern, max_seq_length, global_batch_size, return _dataset_fn -def predict_squad_customized(strategy, - input_meta_data, - bert_config, - checkpoint_path, - predict_tfrecord_path, - num_steps): - """Make predictions using a Bert-based squad model.""" - predict_dataset_fn = get_dataset_fn( - predict_tfrecord_path, - input_meta_data['max_seq_length'], - FLAGS.predict_batch_size, - is_training=False) - predict_iterator = iter( - strategy.experimental_distribute_datasets_from_function( - predict_dataset_fn)) - +def get_squad_model_to_predict(strategy, bert_config, checkpoint_path, + input_meta_data): + """Gets a squad model to make predictions.""" with strategy.scope(): # Prediction always uses float32, even if training uses mixed precision. tf.keras.mixed_precision.experimental.set_policy('float32') @@ -188,6 +179,23 @@ def predict_squad_customized(strategy, logging.info('Restoring checkpoints from %s', checkpoint_path) checkpoint = tf.train.Checkpoint(model=squad_model) checkpoint.restore(checkpoint_path).expect_partial() + return squad_model + + +def predict_squad_customized(strategy, + input_meta_data, + predict_tfrecord_path, + num_steps, + squad_model): + """Make predictions using a Bert-based squad model.""" + predict_dataset_fn = get_dataset_fn( + predict_tfrecord_path, + input_meta_data['max_seq_length'], + FLAGS.predict_batch_size, + is_training=False) + predict_iterator = iter( + strategy.experimental_distribute_datasets_from_function( + predict_dataset_fn)) @tf.function def predict_step(iterator): @@ -287,8 +295,8 @@ def train_squad(strategy, post_allreduce_callbacks=[clip_by_global_norm_callback]) -def prediction_output_squad( - strategy, input_meta_data, tokenizer, bert_config, squad_lib, checkpoint): +def prediction_output_squad(strategy, input_meta_data, tokenizer, squad_lib, + predict_file, squad_model): """Makes predictions for a squad dataset.""" doc_stride = input_meta_data['doc_stride'] max_query_length = input_meta_data['max_query_length'] @@ -296,7 +304,7 @@ def prediction_output_squad( version_2_with_negative = input_meta_data.get('version_2_with_negative', False) eval_examples = squad_lib.read_squad_examples( - input_file=FLAGS.predict_file, + input_file=predict_file, is_training=False, version_2_with_negative=version_2_with_negative) @@ -337,8 +345,7 @@ def prediction_output_squad( num_steps = int(dataset_size / FLAGS.predict_batch_size) all_results = predict_squad_customized( - strategy, input_meta_data, bert_config, - checkpoint, eval_writer.filename, num_steps) + strategy, input_meta_data, eval_writer.filename, num_steps, squad_model) all_predictions, all_nbest_json, scores_diff_json = ( squad_lib.postprocess_output( @@ -356,11 +363,14 @@ def prediction_output_squad( def dump_to_files(all_predictions, all_nbest_json, scores_diff_json, - squad_lib, version_2_with_negative): + squad_lib, version_2_with_negative, file_prefix=''): """Save output to json files.""" - output_prediction_file = os.path.join(FLAGS.model_dir, 'predictions.json') - output_nbest_file = os.path.join(FLAGS.model_dir, 'nbest_predictions.json') - output_null_log_odds_file = os.path.join(FLAGS.model_dir, 'null_odds.json') + output_prediction_file = os.path.join(FLAGS.model_dir, + '%spredictions.json' % file_prefix) + output_nbest_file = os.path.join(FLAGS.model_dir, + '%snbest_predictions.json' % file_prefix) + output_null_log_odds_file = os.path.join(FLAGS.model_dir, file_prefix, + '%snull_odds.json' % file_prefix) 
logging.info('Writing predictions to: %s', (output_prediction_file)) logging.info('Writing nbest to: %s', (output_nbest_file)) @@ -370,6 +380,22 @@ def dump_to_files(all_predictions, all_nbest_json, scores_diff_json, squad_lib.write_to_json_files(scores_diff_json, output_null_log_odds_file) +def _get_matched_files(input_path): + """Returns all files that matches the input_path.""" + input_patterns = input_path.strip().split(',') + all_matched_files = [] + for input_pattern in input_patterns: + input_pattern = input_pattern.strip() + if not input_pattern: + continue + matched_files = tf.io.gfile.glob(input_pattern) + if not matched_files: + raise ValueError('%s does not match any files.' % input_pattern) + else: + all_matched_files.extend(matched_files) + return sorted(all_matched_files) + + def predict_squad(strategy, input_meta_data, tokenizer, @@ -379,11 +405,24 @@ def predict_squad(strategy, """Get prediction results and evaluate them to hard drive.""" if init_checkpoint is None: init_checkpoint = tf.train.latest_checkpoint(FLAGS.model_dir) - all_predictions, all_nbest_json, scores_diff_json = prediction_output_squad( - strategy, input_meta_data, tokenizer, - bert_config, squad_lib, init_checkpoint) - dump_to_files(all_predictions, all_nbest_json, scores_diff_json, squad_lib, - input_meta_data.get('version_2_with_negative', False)) + + all_predict_files = _get_matched_files(FLAGS.predict_file) + squad_model = get_squad_model_to_predict(strategy, bert_config, + init_checkpoint, input_meta_data) + for idx, predict_file in enumerate(all_predict_files): + all_predictions, all_nbest_json, scores_diff_json = prediction_output_squad( + strategy, input_meta_data, tokenizer, squad_lib, predict_file, + squad_model) + if len(all_predict_files) == 1: + file_prefix = '' + else: + # if predict_file is /path/xquad.ar.json, the `file_prefix` may be + # "xquad.ar-0-" + file_prefix = '%s-' % os.path.splitext( + os.path.basename(all_predict_files[idx]))[0] + dump_to_files(all_predictions, all_nbest_json, scores_diff_json, squad_lib, + input_meta_data.get('version_2_with_negative', False), + file_prefix) def eval_squad(strategy, @@ -395,9 +434,17 @@ def eval_squad(strategy, """Get prediction results and evaluate them against ground truth.""" if init_checkpoint is None: init_checkpoint = tf.train.latest_checkpoint(FLAGS.model_dir) + + all_predict_files = _get_matched_files(FLAGS.predict_file) + if len(all_predict_files) != 1: + raise ValueError('`eval_squad` only supports one predict file, ' + 'but got %s' % all_predict_files) + + squad_model = get_squad_model_to_predict(strategy, bert_config, + init_checkpoint, input_meta_data) all_predictions, all_nbest_json, scores_diff_json = prediction_output_squad( - strategy, input_meta_data, tokenizer, - bert_config, squad_lib, init_checkpoint) + strategy, input_meta_data, tokenizer, squad_lib, all_predict_files[0], + squad_model) dump_to_files(all_predictions, all_nbest_json, scores_diff_json, squad_lib, input_meta_data.get('version_2_with_negative', False)) -- GitLab From 7d210ec009fa596ef69d65350415a05d95268785 Mon Sep 17 00:00:00 2001 From: "A. Unique TensorFlower" Date: Fri, 19 Jun 2020 00:44:43 -0700 Subject: [PATCH 30/79] Changing default dtype of Object detection to float32. 
PiperOrigin-RevId: 317263038 --- official/vision/detection/main.py | 31 ++++++++++++++++++++++++------- 1 file changed, 24 insertions(+), 7 deletions(-) diff --git a/official/vision/detection/main.py b/official/vision/detection/main.py index a4d01510f..542be3a1d 100644 --- a/official/vision/detection/main.py +++ b/official/vision/detection/main.py @@ -19,25 +19,28 @@ from __future__ import division # from __future__ import google_type_annotations from __future__ import print_function -from absl import app -from absl import flags -from absl import logging import functools -import os import pprint + +# pylint: disable=g-bad-import-order import tensorflow as tf +from absl import app +from absl import flags +from absl import logging +# pylint: enable=g-bad-import-order + from official.modeling.hyperparams import params_dict from official.modeling.training import distributed_executor as executor from official.utils import hyperparams_flags +from official.utils.flags import core as flags_core +from official.utils.misc import distribution_utils +from official.utils.misc import keras_utils from official.vision.detection.configs import factory as config_factory from official.vision.detection.dataloader import input_reader from official.vision.detection.dataloader import mode_keys as ModeKeys from official.vision.detection.executor.detection_executor import DetectionDistributedExecutor from official.vision.detection.modeling import factory as model_factory -from official.utils.flags import core as flags_core -from official.utils.misc import distribution_utils -from official.utils.misc import keras_utils hyperparams_flags.initialize_common_flags() flags_core.define_log_steps() @@ -194,6 +197,20 @@ def run(callbacks=None): 'strategy_config': executor.strategy_flags_dict(), }, is_strict=False) + + # Make sure use_tpu and strategy_type are in sync. + params.use_tpu = (params.strategy_type == 'tpu') + + if not params.use_tpu: + params.override({ + 'architecture': { + 'use_bfloat16': False, + }, + 'norm_activation': { + 'use_sync_bn': False, + }, + }, is_strict=True) + params.validate() params.lock() pp = pprint.PrettyPrinter() -- GitLab From 58e805e093620e04168c2e07c92fc60e1e87d553 Mon Sep 17 00:00:00 2001 From: Hongkun Yu Date: Fri, 19 Jun 2020 10:18:50 -0700 Subject: [PATCH 31/79] Move TransformerDecoderLayer to modeling/ PiperOrigin-RevId: 317330705 --- official/nlp/modeling/layers/README.md | 6 +- official/nlp/modeling/layers/__init__.py | 1 + .../layers}/multi_channel_attention.py | 45 ++++-- .../layers}/multi_channel_attention_test.py | 5 +- official/nlp/modeling/layers/transformer.py | 143 +++++++++++++++++ .../nlp/modeling/layers/transformer_test.py | 36 +++++ official/nlp/nhnet/decoder.py | 145 +----------------- official/nlp/nhnet/decoder_test.py | 31 ---- official/nlp/nhnet/models.py | 4 +- 9 files changed, 224 insertions(+), 192 deletions(-) rename official/nlp/{nhnet => modeling/layers}/multi_channel_attention.py (80%) rename official/nlp/{nhnet => modeling/layers}/multi_channel_attention_test.py (92%) diff --git a/official/nlp/modeling/layers/README.md b/official/nlp/modeling/layers/README.md index 212aee22b..e78818732 100644 --- a/official/nlp/modeling/layers/README.md +++ b/official/nlp/modeling/layers/README.md @@ -9,13 +9,17 @@ assemble new layers, networks, or models. initialization parameters. 
* [MultiHeadAttention](attention.py) implements an optionally masked attention - between two tensors, from_tensor and to_tensor, as described in + between query, key, value tensors as described in ["Attention Is All You Need"](https://arxiv.org/abs/1706.03762). If `from_tensor` and `to_tensor` are the same, then this is self-attention. * [CachedAttention](attention.py) implements an attention layer with cache used for auto-agressive decoding. +* [MultiChannelAttention](multi_channel_attention.py) implements an variant of + multi-head attention which can be used to merge multiple streams for + cross-attentions. + * [TalkingHeadsAttention](talking_heads_attention.py) implements the talking heads attention, as decribed in ["Talking-Heads Attention"](https://arxiv.org/abs/2003.02436). diff --git a/official/nlp/modeling/layers/__init__.py b/official/nlp/modeling/layers/__init__.py index 9c89b0b17..08b6d596a 100644 --- a/official/nlp/modeling/layers/__init__.py +++ b/official/nlp/modeling/layers/__init__.py @@ -20,6 +20,7 @@ from official.nlp.modeling.layers.dense_einsum import DenseEinsum from official.nlp.modeling.layers.gated_feedforward import GatedFeedforward from official.nlp.modeling.layers.masked_lm import MaskedLM from official.nlp.modeling.layers.masked_softmax import MaskedSoftmax +from official.nlp.modeling.layers.multi_channel_attention import * from official.nlp.modeling.layers.on_device_embedding import OnDeviceEmbedding from official.nlp.modeling.layers.position_embedding import PositionEmbedding from official.nlp.modeling.layers.rezero_transformer import ReZeroTransformer diff --git a/official/nlp/nhnet/multi_channel_attention.py b/official/nlp/modeling/layers/multi_channel_attention.py similarity index 80% rename from official/nlp/nhnet/multi_channel_attention.py rename to official/nlp/modeling/layers/multi_channel_attention.py index ce97876d9..499d977c7 100644 --- a/official/nlp/nhnet/multi_channel_attention.py +++ b/official/nlp/modeling/layers/multi_channel_attention.py @@ -13,7 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== -"""Multi-channel decoder.""" +"""Multi-channel Attention.""" +# pylint: disable=g-classes-have-attributes from __future__ import absolute_import from __future__ import division @@ -24,11 +25,25 @@ import math import tensorflow as tf from official.modeling import tf_utils -from official.nlp.modeling import layers - - -class DocAttention(tf.keras.layers.Layer): - """Documents Attention layer.""" +from official.nlp.modeling.layers import attention +from official.nlp.modeling.layers import dense_einsum +from official.nlp.modeling.layers import masked_softmax + + +class VotingAttention(tf.keras.layers.Layer): + """Voting Attention layer. + + Arguments: + num_heads: the number of attention heads. + head_size: per-head hidden size. + kernel_initializer: Initializer for dense layer kernels. + bias_initializer: Initializer for dense layer biases. + kernel_regularizer: Regularizer for dense layer kernels. + bias_regularizer: Regularizer for dense layer biases. + activity_regularizer: Regularizer for dense layer activity. + kernel_constraint: Constraint for dense layer kernels. + bias_constraint: Constraint for dense layer kernels. 
+ """ def __init__(self, num_heads, @@ -41,7 +56,7 @@ class DocAttention(tf.keras.layers.Layer): kernel_constraint=None, bias_constraint=None, **kwargs): - super(DocAttention, self).__init__(**kwargs) + super(VotingAttention, self).__init__(**kwargs) self._num_heads = num_heads self._head_size = head_size self._kernel_initializer = tf.keras.initializers.get(kernel_initializer) @@ -52,7 +67,7 @@ class DocAttention(tf.keras.layers.Layer): self._bias_constraint = tf.keras.constraints.get(bias_constraint) def build(self, unused_input_shapes): - self._query_dense = layers.DenseEinsum( + self._query_dense = dense_einsum.DenseEinsum( output_shape=(self._num_heads, self._head_size), kernel_initializer=self._kernel_initializer, bias_initializer=self._bias_initializer, @@ -63,7 +78,7 @@ class DocAttention(tf.keras.layers.Layer): bias_constraint=self._bias_constraint, dtype=self.dtype, name="encdocatt_query") - self._key_dense = layers.DenseEinsum( + self._key_dense = dense_einsum.DenseEinsum( output_shape=(self._num_heads, self._head_size), kernel_initializer=self._kernel_initializer, bias_initializer=self._bias_initializer, @@ -74,7 +89,7 @@ class DocAttention(tf.keras.layers.Layer): bias_constraint=self._bias_constraint, dtype=self.dtype, name="encdocatt_key") - super(DocAttention, self).build(unused_input_shapes) + super(VotingAttention, self).build(unused_input_shapes) def call(self, encoder_outputs, doc_attention_mask): num_docs = tf_utils.get_shape_list(encoder_outputs, expected_rank=[4])[1] @@ -95,12 +110,16 @@ class DocAttention(tf.keras.layers.Layer): return tf.nn.softmax(doc_attention_probs + infadder) -class MultiChannelAttention(layers.MultiHeadAttention): - """Multi-channel Attention layer.""" +class MultiChannelAttention(attention.MultiHeadAttention): + """Multi-channel Attention layer. + + Introduced in: https://arxiv.org/abs/2001.09386. Expects multiple + cross-attention target sequences. 
+ """ def build(self, input_shape): super(MultiChannelAttention, self).build(input_shape) - self._masked_softmax = layers.MaskedSoftmax(mask_expansion_axes=[2]) + self._masked_softmax = masked_softmax.MaskedSoftmax(mask_expansion_axes=[2]) def call(self, inputs, attention_mask=None): from_tensor = inputs[0] diff --git a/official/nlp/nhnet/multi_channel_attention_test.py b/official/nlp/modeling/layers/multi_channel_attention_test.py similarity index 92% rename from official/nlp/nhnet/multi_channel_attention_test.py rename to official/nlp/modeling/layers/multi_channel_attention_test.py index 76f556fd1..ab6e0e7fe 100644 --- a/official/nlp/nhnet/multi_channel_attention_test.py +++ b/official/nlp/modeling/layers/multi_channel_attention_test.py @@ -22,14 +22,15 @@ from __future__ import print_function import numpy as np import tensorflow as tf -from official.nlp.nhnet import multi_channel_attention +from official.nlp.modeling.layers import multi_channel_attention class MultiChannelAttentionTest(tf.test.TestCase): def test_doc_attention(self): num_heads = 2 - doc_attention = multi_channel_attention.DocAttention(num_heads, head_size=8) + doc_attention = multi_channel_attention.VotingAttention( + num_heads, head_size=8) num_docs = 3 inputs = np.zeros((2, num_docs, 10, 16), dtype=np.float32) doc_mask = np.zeros((2, num_docs), dtype=np.float32) diff --git a/official/nlp/modeling/layers/transformer.py b/official/nlp/modeling/layers/transformer.py index 856c977ae..104ebaef0 100644 --- a/official/nlp/modeling/layers/transformer.py +++ b/official/nlp/modeling/layers/transformer.py @@ -24,6 +24,7 @@ import tensorflow as tf from official.nlp.modeling.layers import attention from official.nlp.modeling.layers import dense_einsum +from official.nlp.modeling.layers import multi_channel_attention from official.nlp.modeling.layers.util import tf_function_if_eager @@ -236,3 +237,145 @@ class CompiledTransformer(Transformer): @tf_function_if_eager(experimental_compile=True) def call(self, inputs): return super(CompiledTransformer, self).call(inputs) + + +@tf.keras.utils.register_keras_serializable(package="Text") +class TransformerDecoderLayer(tf.keras.layers.Layer): + """Single transformer layer for decoder. + + It has three sub-layers: + (1) a multi-head self-attention mechanism. + (2) a encoder-decoder attention. + (3) a positionwise fully connected feed-forward network. 
+ """ + + def __init__(self, + hidden_size=768, + num_attention_heads=12, + intermediate_size=3072, + intermediate_activation="relu", + hidden_dropout_prob=0.0, + attention_probs_dropout_prob=0.0, + initializer_range=0.02, + multi_channel_cross_attention=False, + **kwargs): + super(TransformerDecoderLayer, self).__init__(**kwargs) + self.hidden_size = hidden_size + self.num_attention_heads = num_attention_heads + self.intermediate_size = intermediate_size + self.intermediate_activation = tf.keras.activations.get( + intermediate_activation) + self.hidden_dropout_prob = hidden_dropout_prob + self.attention_probs_dropout_prob = attention_probs_dropout_prob + self.multi_channel_cross_attention = multi_channel_cross_attention + self._kernel_initializer = tf.keras.initializers.TruncatedNormal( + stddev=initializer_range) + self._bias_initializer = tf.keras.initializers.get("zeros") + if self.multi_channel_cross_attention: + self._cross_attention_cls = multi_channel_attention.MultiChannelAttention + else: + self._cross_attention_cls = attention.MultiHeadAttention + + if self.hidden_size % self.num_attention_heads != 0: + raise ValueError( + "The hidden size (%d) is not a multiple of the number of attention " + "heads (%d)" % (self.hidden_size, self.num_attention_heads)) + self.attention_head_size = int(self.hidden_size / self.num_attention_heads) + + def build(self, input_shape): + # Self attention. + self.self_attention = attention.CachedAttention( + num_heads=self.num_attention_heads, + key_size=self.attention_head_size, + dropout=self.attention_probs_dropout_prob, + kernel_initializer=self._kernel_initializer, + name="self_attention") + self.self_attention_output_dense = dense_einsum.DenseEinsum( + output_shape=self.hidden_size, + num_summed_dimensions=2, + kernel_initializer=self._kernel_initializer, + bias_initializer=self._bias_initializer, + name="self_attention_output") + self.self_attention_dropout = tf.keras.layers.Dropout( + rate=self.hidden_dropout_prob) + self.self_attention_layer_norm = ( + tf.keras.layers.LayerNormalization( + name="self_attention_layer_norm", axis=-1, epsilon=1e-12)) + # Encoder-decoder attention. + self.encdec_attention = self._cross_attention_cls( + num_heads=self.num_attention_heads, + key_size=self.attention_head_size, + dropout=self.attention_probs_dropout_prob, + output_shape=self.hidden_size, + kernel_initializer=self._kernel_initializer, + name="attention/encdec") + + self.encdec_attention_dropout = tf.keras.layers.Dropout( + rate=self.hidden_dropout_prob) + self.encdec_attention_layer_norm = ( + tf.keras.layers.LayerNormalization( + name="attention/encdec_output_layer_norm", axis=-1, epsilon=1e-12)) + + # Feed-forward projection. 
+ self.intermediate_dense = dense_einsum.DenseEinsum( + output_shape=self.intermediate_size, + activation=None, + kernel_initializer=self._kernel_initializer, + bias_initializer=self._bias_initializer, + name="intermediate") + self.intermediate_activation_layer = tf.keras.layers.Activation( + self.intermediate_activation) + self.output_dense = dense_einsum.DenseEinsum( + output_shape=self.hidden_size, + kernel_initializer=self._kernel_initializer, + bias_initializer=self._bias_initializer, + name="output") + self.output_dropout = tf.keras.layers.Dropout(rate=self.hidden_dropout_prob) + self.output_layer_norm = tf.keras.layers.LayerNormalization( + name="output_layer_norm", axis=-1, epsilon=1e-12) + super(TransformerDecoderLayer, self).build(input_shape) + + def common_layers_with_encoder(self): + """Gets layer objects that can make a Transformer encoder block.""" + return [ + self.self_attention, self.self_attention_layer_norm, + self.intermediate_dense, self.output_dense, self.output_layer_norm + ] + + def call(self, inputs, cache=None, decode_loop_step=None): + if self.multi_channel_cross_attention: + if len(inputs) != 5: + raise ValueError( + "TransformerDecoderLayer must have 5 inputs, when it uses " + "multi_channel_cross_attention. But it got: %d" % len(inputs)) + elif len(inputs) != 4: + raise ValueError( + "TransformerDecoderLayer must have 4 inputs, but it got: %d" % + len(inputs)) + input_tensor, memory, attention_mask, self_attention_mask = inputs[:4] + self_attention_inputs = [input_tensor, input_tensor] + self_attention_output, cache = self.self_attention( + self_attention_inputs, + attention_mask=self_attention_mask, + cache=cache, + decode_loop_step=decode_loop_step) + self_attention_output = self.self_attention_dropout(self_attention_output) + self_attention_output = self.self_attention_layer_norm( + input_tensor + self_attention_output) + + cross_attn_inputs = [self_attention_output, memory] + if self.multi_channel_cross_attention: + # Accesses the 5-th input tensor for the doc-attention probabilities. 
+ cross_attn_inputs.append(inputs[-1]) + attention_output = self.encdec_attention(cross_attn_inputs, attention_mask) + attention_output = self.encdec_attention_dropout(attention_output) + attention_output = self.encdec_attention_layer_norm(self_attention_output + + attention_output) + + intermediate_output = self.intermediate_dense(attention_output) + intermediate_output = self.intermediate_activation_layer( + intermediate_output) + layer_output = self.output_dense(intermediate_output) + layer_output = self.output_dropout(layer_output) + layer_output = self.output_layer_norm(layer_output + attention_output) + return layer_output, cache diff --git a/official/nlp/modeling/layers/transformer_test.py b/official/nlp/modeling/layers/transformer_test.py index 753ced3e7..1b494ac87 100644 --- a/official/nlp/modeling/layers/transformer_test.py +++ b/official/nlp/modeling/layers/transformer_test.py @@ -215,5 +215,41 @@ class TransformerLayerTest(keras_parameterized.TestCase): self.assertAllEqual([1, input_length, width], output_data.shape) +def _create_cache(batch_size, init_decode_length, num_heads, head_size): + return { + 'key': + tf.zeros([batch_size, init_decode_length, num_heads, head_size], + dtype=tf.float32), + 'value': + tf.zeros([batch_size, init_decode_length, num_heads, head_size], + dtype=tf.float32) + } + + +@keras_parameterized.run_all_keras_modes +class TransformerDecoderLayerTest(keras_parameterized.TestCase): + + def test_decoder_block_with_cache(self): + num_attention_heads = 2 + hidden_size = 16 + decoder_block = transformer.TransformerDecoderLayer( + hidden_size=hidden_size, + num_attention_heads=num_attention_heads, + intermediate_size=32, + intermediate_activation='relu', + hidden_dropout_prob=0.1, + attention_probs_dropout_prob=0.1, + initializer_range=0.1) + # Forward path. + dummy_tensor = tf.zeros([2, 4, 16], dtype=tf.float32) + dummy_mask = tf.zeros([2, 4, 4], dtype=tf.float32) + inputs = [dummy_tensor, dummy_tensor, dummy_mask, dummy_mask] + cache = _create_cache(2, 0, num_attention_heads, + hidden_size // num_attention_heads) + output, cache = decoder_block(inputs, cache) + self.assertEqual(output.shape, (2, 4, hidden_size)) + self.assertEqual(cache['value'].shape, (2, 4, 2, 8)) + + if __name__ == '__main__': tf.test.main() diff --git a/official/nlp/nhnet/decoder.py b/official/nlp/nhnet/decoder.py index 0e46baa5f..c9f676dba 100644 --- a/official/nlp/nhnet/decoder.py +++ b/official/nlp/nhnet/decoder.py @@ -22,151 +22,10 @@ from __future__ import print_function import tensorflow as tf from official.modeling import tf_utils from official.nlp.modeling import layers -from official.nlp.nhnet import multi_channel_attention +from official.nlp.modeling.layers import transformer from official.nlp.transformer import model_utils as transformer_utils -class TransformerDecoderBlock(tf.keras.layers.Layer): - """Single transformer layer for decoder. - - It has three sub-layers: - (1) a multi-head self-attention mechanism. - (2) a encoder-decoder attention. - (3) a positionwise fully connected feed-forward network. 
- """ - - def __init__(self, - hidden_size=768, - num_attention_heads=12, - intermediate_size=3072, - intermediate_activation="gelu", - hidden_dropout_prob=0.0, - attention_probs_dropout_prob=0.0, - initializer_range=0.02, - multi_channel_cross_attention=False, - **kwargs): - super(TransformerDecoderBlock, self).__init__(**kwargs) - self.hidden_size = hidden_size - self.num_attention_heads = num_attention_heads - self.intermediate_size = intermediate_size - self.intermediate_activation = tf_utils.get_activation( - intermediate_activation) - self.hidden_dropout_prob = hidden_dropout_prob - self.attention_probs_dropout_prob = attention_probs_dropout_prob - self.multi_channel_cross_attention = multi_channel_cross_attention - self._kernel_initializer = tf.keras.initializers.TruncatedNormal( - stddev=initializer_range) - self._bias_initializer = tf.keras.initializers.get("zeros") - if self.multi_channel_cross_attention: - self._cross_attention_cls = multi_channel_attention.MultiChannelAttention - else: - self._cross_attention_cls = layers.MultiHeadAttention - - if self.hidden_size % self.num_attention_heads != 0: - raise ValueError( - "The hidden size (%d) is not a multiple of the number of attention " - "heads (%d)" % (self.hidden_size, self.num_attention_heads)) - self.attention_head_size = int(self.hidden_size / self.num_attention_heads) - - def build(self, input_shape): - # Self attention. - self.self_attention = layers.CachedAttention( - num_heads=self.num_attention_heads, - key_size=self.attention_head_size, - dropout=self.attention_probs_dropout_prob, - kernel_initializer=self._kernel_initializer, - name="self_attention") - self.self_attention_output_dense = layers.DenseEinsum( - output_shape=self.hidden_size, - num_summed_dimensions=2, - kernel_initializer=self._kernel_initializer, - bias_initializer=self._bias_initializer, - name="self_attention_output") - self.self_attention_dropout = tf.keras.layers.Dropout( - rate=self.hidden_dropout_prob) - self.self_attention_layer_norm = ( - tf.keras.layers.LayerNormalization( - name="self_attention_layer_norm", axis=-1, epsilon=1e-12)) - # Encoder-decoder attention. - self.encdec_attention = self._cross_attention_cls( - num_heads=self.num_attention_heads, - key_size=self.attention_head_size, - dropout=self.attention_probs_dropout_prob, - output_shape=self.hidden_size, - kernel_initializer=self._kernel_initializer, - name="attention/encdec") - - self.encdec_attention_dropout = tf.keras.layers.Dropout( - rate=self.hidden_dropout_prob) - self.encdec_attention_layer_norm = ( - tf.keras.layers.LayerNormalization( - name="attention/encdec_output_layer_norm", axis=-1, epsilon=1e-12)) - - # Feed-forward projection. 
- self.intermediate_dense = layers.DenseEinsum( - output_shape=self.intermediate_size, - activation=None, - kernel_initializer=self._kernel_initializer, - bias_initializer=self._bias_initializer, - name="intermediate") - self.intermediate_activation_layer = tf.keras.layers.Activation( - self.intermediate_activation) - self.output_dense = layers.DenseEinsum( - output_shape=self.hidden_size, - kernel_initializer=self._kernel_initializer, - bias_initializer=self._bias_initializer, - name="output") - self.output_dropout = tf.keras.layers.Dropout(rate=self.hidden_dropout_prob) - self.output_layer_norm = tf.keras.layers.LayerNormalization( - name="output_layer_norm", axis=-1, epsilon=1e-12) - super(TransformerDecoderBlock, self).build(input_shape) - - def common_layers_with_encoder(self): - """Gets layer objects that can make a Transformer encoder block.""" - return [ - self.self_attention, self.self_attention_layer_norm, - self.intermediate_dense, self.output_dense, self.output_layer_norm - ] - - def call(self, inputs, cache=None, decode_loop_step=None): - if self.multi_channel_cross_attention: - if len(inputs) != 5: - raise ValueError( - "TransformerDecoderBlock must have 5 inputs, when it uses " - "multi_channel_cross_attention. But it got: %d" % len(inputs)) - elif len(inputs) != 4: - raise ValueError( - "TransformerDecoderBlock must have 4 inputs, but it got: %d" % - len(inputs)) - input_tensor, memory, attention_mask, self_attention_mask = inputs[:4] - self_attention_inputs = [input_tensor, input_tensor] - self_attention_output, cache = self.self_attention( - self_attention_inputs, - attention_mask=self_attention_mask, - cache=cache, - decode_loop_step=decode_loop_step) - self_attention_output = self.self_attention_dropout(self_attention_output) - self_attention_output = self.self_attention_layer_norm( - input_tensor + self_attention_output) - - cross_attn_inputs = [self_attention_output, memory] - if self.multi_channel_cross_attention: - # Accesses the 5-th input tensor for the doc-attention probabilities. 
- cross_attn_inputs.append(inputs[-1]) - attention_output = self.encdec_attention(cross_attn_inputs, attention_mask) - attention_output = self.encdec_attention_dropout(attention_output) - attention_output = self.encdec_attention_layer_norm(self_attention_output + - attention_output) - - intermediate_output = self.intermediate_dense(attention_output) - intermediate_output = self.intermediate_activation_layer( - intermediate_output) - layer_output = self.output_dense(intermediate_output) - layer_output = self.output_dropout(layer_output) - layer_output = self.output_layer_norm(layer_output + attention_output) - return layer_output, cache - - class TransformerDecoder(tf.keras.layers.Layer): """Transformer decoder stack.""" @@ -200,7 +59,7 @@ class TransformerDecoder(tf.keras.layers.Layer): self.layers = [] for i in range(self.num_hidden_layers): self.layers.append( - TransformerDecoderBlock( + transformer.TransformerDecoderLayer( hidden_size=self.hidden_size, num_attention_heads=self.num_attention_heads, intermediate_size=self.intermediate_size, diff --git a/official/nlp/nhnet/decoder_test.py b/official/nlp/nhnet/decoder_test.py index 1a58c7e12..f5effbdb0 100644 --- a/official/nlp/nhnet/decoder_test.py +++ b/official/nlp/nhnet/decoder_test.py @@ -26,17 +26,6 @@ from official.nlp.nhnet import decoder from official.nlp.nhnet import utils -def _create_cache(batch_size, init_decode_length, num_heads, head_size): - return { - "key": - tf.zeros([batch_size, init_decode_length, num_heads, head_size], - dtype=tf.float32), - "value": - tf.zeros([batch_size, init_decode_length, num_heads, head_size], - dtype=tf.float32) - } - - class DecoderTest(tf.test.TestCase): def setUp(self): @@ -56,26 +45,6 @@ class DecoderTest(tf.test.TestCase): decoder_block.build(None) self.assertEqual(len(decoder_block.layers), self._config.num_hidden_layers) - def test_decoder_block_with_cache(self): - decoder_block = decoder.TransformerDecoderBlock( - hidden_size=self._config.hidden_size, - num_attention_heads=self._config.num_attention_heads, - intermediate_size=self._config.intermediate_size, - intermediate_activation=self._config.hidden_act, - hidden_dropout_prob=self._config.hidden_dropout_prob, - attention_probs_dropout_prob=self._config.attention_probs_dropout_prob, - initializer_range=self._config.initializer_range) - # Forward path. 
- dummy_tensor = tf.zeros([2, 4, self._config.hidden_size], dtype=tf.float32) - dummy_mask = tf.zeros([2, 4, 4], dtype=tf.float32) - inputs = [dummy_tensor, dummy_tensor, dummy_mask, dummy_mask] - cache = _create_cache( - 2, 0, self._config.num_attention_heads, - self._config.hidden_size // self._config.num_attention_heads) - output, cache = decoder_block(inputs, cache) - self.assertEqual(output.shape, (2, 4, self._config.hidden_size)) - self.assertEqual(cache["value"].shape, (2, 4, 2, 8)) - def test_bert_decoder(self): seq_length = 10 encoder_input_ids = tf.keras.layers.Input( diff --git a/official/nlp/nhnet/models.py b/official/nlp/nhnet/models.py index 52b6c430b..d6f70e7f3 100644 --- a/official/nlp/nhnet/models.py +++ b/official/nlp/nhnet/models.py @@ -27,9 +27,9 @@ from typing import Optional, Text from official.modeling import tf_utils from official.modeling.hyperparams import params_dict from official.nlp.modeling import networks +from official.nlp.modeling.layers import multi_channel_attention from official.nlp.nhnet import configs from official.nlp.nhnet import decoder -from official.nlp.nhnet import multi_channel_attention from official.nlp.nhnet import utils from official.nlp.transformer import beam_search @@ -273,7 +273,7 @@ class NHNet(Bert2Bert): def __init__(self, params, bert_layer, decoder_layer, name=None): super(NHNet, self).__init__(params, bert_layer, decoder_layer, name=name) - self.doc_attention = multi_channel_attention.DocAttention( + self.doc_attention = multi_channel_attention.VotingAttention( num_heads=params.num_decoder_attn_heads, head_size=params.hidden_size // params.num_decoder_attn_heads) -- GitLab From a9bdee32a3d3d7d2349d4a79baf251f475a61046 Mon Sep 17 00:00:00 2001 From: Chen Chen Date: Fri, 19 Jun 2020 10:44:41 -0700 Subject: [PATCH 32/79] Internal change PiperOrigin-RevId: 317336167 --- official/nlp/configs/encoders.py | 1 + 1 file changed, 1 insertion(+) diff --git a/official/nlp/configs/encoders.py b/official/nlp/configs/encoders.py index 59d43295c..0af5b733d 100644 --- a/official/nlp/configs/encoders.py +++ b/official/nlp/configs/encoders.py @@ -54,6 +54,7 @@ def instantiate_encoder_from_cfg( activation=tf_utils.get_activation(config.hidden_activation), dropout_rate=config.dropout_rate, attention_dropout_rate=config.attention_dropout_rate, + sequence_length=None, max_sequence_length=config.max_position_embeddings, type_vocab_size=config.type_vocab_size, initializer=tf.keras.initializers.TruncatedNormal( -- GitLab From 59e070e4a69c1aab88c3b2e782dd8135f2a4de90 Mon Sep 17 00:00:00 2001 From: "A. 
Unique TensorFlower" Date: Fri, 19 Jun 2020 12:04:54 -0700 Subject: [PATCH 33/79] Internal change PiperOrigin-RevId: 317352848 --- official/nlp/data/classifier_data_lib.py | 49 +++++++++++++++++++++ official/nlp/data/create_finetuning_data.py | 1 + 2 files changed, 50 insertions(+) diff --git a/official/nlp/data/classifier_data_lib.py b/official/nlp/data/classifier_data_lib.py index a3954d1b6..f851557c0 100644 --- a/official/nlp/data/classifier_data_lib.py +++ b/official/nlp/data/classifier_data_lib.py @@ -564,6 +564,55 @@ class ColaProcessor(DataProcessor): return examples +class RteProcessor(DataProcessor): + """Processor for the RTE data set (GLUE version).""" + + def get_train_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "train.tsv")), "train") + + def get_dev_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev") + + def get_test_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "test.tsv")), "test") + + def get_labels(self): + """See base class.""" + # All datasets are converted to 2-class split, where for 3-class datasets we + # collapse neutral and contradiction into not_entailment. + return ["entailment", "not_entailment"] + + @staticmethod + def get_processor_name(): + """See base class.""" + return "RTE" + + def _create_examples(self, lines, set_type): + """Creates examples for the training and dev sets.""" + examples = [] + for i, line in enumerate(lines): + if i == 0: + continue + guid = "%s-%s" % (set_type, i) + if set_type == "test": + text_a = tokenization.convert_to_unicode(line[1]) + text_b = tokenization.convert_to_unicode(line[2]) + label = "entailment" + else: + text_a = tokenization.convert_to_unicode(line[1]) + text_b = tokenization.convert_to_unicode(line[2]) + label = tokenization.convert_to_unicode(line[3]) + examples.append( + InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) + return examples + + class SstProcessor(DataProcessor): """Processor for the SST-2 data set (GLUE version).""" diff --git a/official/nlp/data/create_finetuning_data.py b/official/nlp/data/create_finetuning_data.py index 54a07732b..814ae4060 100644 --- a/official/nlp/data/create_finetuning_data.py +++ b/official/nlp/data/create_finetuning_data.py @@ -173,6 +173,7 @@ def generate_classifier_dataset(): "qnli": classifier_data_lib.QnliProcessor, "qqp": classifier_data_lib.QqpProcessor, + "rte": classifier_data_lib.RteProcessor, "sst-2": classifier_data_lib.SstProcessor, "xnli": -- GitLab From 819c52f0625fe337811193369e407037afa91786 Mon Sep 17 00:00:00 2001 From: Chen Chen Date: Fri, 19 Jun 2020 12:49:01 -0700 Subject: [PATCH 34/79] Internal change PiperOrigin-RevId: 317361519 --- official/nlp/data/classifier_data_lib.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/official/nlp/data/classifier_data_lib.py b/official/nlp/data/classifier_data_lib.py index f851557c0..462501a59 100644 --- a/official/nlp/data/classifier_data_lib.py +++ b/official/nlp/data/classifier_data_lib.py @@ -234,7 +234,7 @@ class XtremeXnliProcessor(DataProcessor): guid = f"test-{i}" text_a = self.process_text_fn(line[0]) text_b = self.process_text_fn(line[1]) - label = self.process_text_fn(line[2]) + label = "contradiction" examples_by_lang[lang].append( InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) return examples_by_lang @@ 
-346,7 +346,7 @@ class XtremePawsxProcessor(DataProcessor): def get_dev_examples(self, data_dir): """See base class.""" - lines = self._read_tsv(os.path.join(data_dir, "dev_en.tsv")) + lines = self._read_tsv(os.path.join(data_dir, "dev-en.tsv")) examples = [] for (i, line) in enumerate(lines): @@ -367,7 +367,7 @@ class XtremePawsxProcessor(DataProcessor): guid = "test-%d" % i text_a = self.process_text_fn(line[0]) text_b = self.process_text_fn(line[1]) - label = self.process_text_fn(line[2]) + label = "0" examples_by_lang[lang].append( InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) return examples_by_lang -- GitLab From b708fd68e312ddc9a18b6b553c508b8e19507ee0 Mon Sep 17 00:00:00 2001 From: "A. Unique TensorFlower" Date: Fri, 19 Jun 2020 17:48:52 -0700 Subject: [PATCH 35/79] Add ELECTRA TF 2.x pretrainer. Contributed by mickeystroller PiperOrigin-RevId: 317411747 --- .../nlp/modeling/models/electra_pretrainer.py | 307 ++++++++++++++++++ .../models/electra_pretrainer_test.py | 156 +++++++++ .../modeling/networks/transformer_encoder.py | 23 +- 3 files changed, 480 insertions(+), 6 deletions(-) create mode 100644 official/nlp/modeling/models/electra_pretrainer.py create mode 100644 official/nlp/modeling/models/electra_pretrainer_test.py diff --git a/official/nlp/modeling/models/electra_pretrainer.py b/official/nlp/modeling/models/electra_pretrainer.py new file mode 100644 index 000000000..21fe3a0d9 --- /dev/null +++ b/official/nlp/modeling/models/electra_pretrainer.py @@ -0,0 +1,307 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Trainer network for ELECTRA models.""" +# pylint: disable=g-classes-have-attributes +from __future__ import absolute_import +from __future__ import division +# from __future__ import google_type_annotations +from __future__ import print_function + +import copy +import tensorflow as tf + +from official.modeling import tf_utils +from official.nlp.modeling import layers + + +@tf.keras.utils.register_keras_serializable(package='Text') +class ElectraPretrainer(tf.keras.Model): + """ELECTRA network training model. + + This is an implementation of the network structure described in "ELECTRA: + Pre-training Text Encoders as Discriminators Rather Than Generators" ( + https://arxiv.org/abs/2003.10555). + + The ElectraPretrainer allows a user to pass in two transformer models, one for + generator, the other for discriminator, and instantiates the masked language + model (at generator side) and classification networks (at discriminator side) + that are used to create the training objectives. + + Arguments: + generator_network: A transformer network for generator, this network should + output a sequence output and an optional classification output. 
+ discriminator_network: A transformer network for discriminator, this network + should output a sequence output + vocab_size: Size of generator output vocabulary + num_classes: Number of classes to predict from the classification network + for the generator network (not used now) + sequence_length: Input sequence length + last_hidden_dim: Last hidden dim of generator transformer output + num_token_predictions: Number of tokens to predict from the masked LM. + mlm_activation: The activation (if any) to use in the masked LM and + classification networks. If None, no activation will be used. + mlm_initializer: The initializer (if any) to use in the masked LM and + classification networks. Defaults to a Glorot uniform initializer. + output_type: The output style for this network. Can be either 'logits' or + 'predictions'. + disallow_correct: Whether to disallow the generator to generate the exact + same token in the original sentence + """ + + def __init__(self, + generator_network, + discriminator_network, + vocab_size, + num_classes, + sequence_length, + last_hidden_dim, + num_token_predictions, + mlm_activation=None, + mlm_initializer='glorot_uniform', + output_type='logits', + disallow_correct=False, + **kwargs): + super(ElectraPretrainer, self).__init__() + self._config = { + 'generator_network': generator_network, + 'discriminator_network': discriminator_network, + 'vocab_size': vocab_size, + 'num_classes': num_classes, + 'sequence_length': sequence_length, + 'last_hidden_dim': last_hidden_dim, + 'num_token_predictions': num_token_predictions, + 'mlm_activation': mlm_activation, + 'mlm_initializer': mlm_initializer, + 'output_type': output_type, + 'disallow_correct': disallow_correct, + } + for k, v in kwargs.items(): + self._config[k] = v + + self.generator_network = generator_network + self.discriminator_network = discriminator_network + self.vocab_size = vocab_size + self.num_classes = num_classes + self.sequence_length = sequence_length + self.last_hidden_dim = last_hidden_dim + self.num_token_predictions = num_token_predictions + self.mlm_activation = mlm_activation + self.mlm_initializer = mlm_initializer + self.output_type = output_type + self.disallow_correct = disallow_correct + self.masked_lm = layers.MaskedLM( + embedding_table=generator_network.get_embedding_table(), + activation=mlm_activation, + initializer=mlm_initializer, + output=output_type, + name='generator_masked_lm') + self.classification = layers.ClassificationHead( + inner_dim=last_hidden_dim, + num_classes=num_classes, + initializer=mlm_initializer, + name='generator_classification_head') + self.discriminator_head = tf.keras.layers.Dense( + units=1, kernel_initializer=mlm_initializer) + + def call(self, inputs): + input_word_ids = inputs['input_word_ids'] + input_mask = inputs['input_mask'] + input_type_ids = inputs['input_type_ids'] + masked_lm_positions = inputs['masked_lm_positions'] + + ### Generator ### + sequence_output, cls_output = self.generator_network( + [input_word_ids, input_mask, input_type_ids]) + + # The generator encoder network may get outputs from all layers. 
+ if isinstance(sequence_output, list): + sequence_output = sequence_output[-1] + if isinstance(cls_output, list): + cls_output = cls_output[-1] + + lm_outputs = self.masked_lm(sequence_output, masked_lm_positions) + sentence_outputs = self.classification(sequence_output) + + ### Sampling from generator ### + fake_data = self._get_fake_data(inputs, lm_outputs, duplicate=True) + + ### Discriminator ### + disc_input = fake_data['inputs'] + disc_label = fake_data['is_fake_tokens'] + disc_sequence_output, _ = self.discriminator_network([ + disc_input['input_word_ids'], disc_input['input_mask'], + disc_input['input_type_ids'] + ]) + + # The discriminator encoder network may get outputs from all layers. + if isinstance(disc_sequence_output, list): + disc_sequence_output = disc_sequence_output[-1] + + disc_logits = self.discriminator_head(disc_sequence_output) + disc_logits = tf.squeeze(disc_logits, axis=-1) + + return lm_outputs, sentence_outputs, disc_logits, disc_label + + def _get_fake_data(self, inputs, mlm_logits, duplicate=True): + """Generate corrupted data for discriminator. + + Args: + inputs: A dict of all inputs, same as the input of call() function + mlm_logits: The generator's output logits + duplicate: Whether to copy the original inputs dict during modifications + + Returns: + A dict of generated fake data + """ + inputs = unmask(inputs, duplicate) + + if self.disallow_correct: + disallow = tf.one_hot( + inputs['masked_lm_ids'], depth=self.vocab_size, dtype=tf.float32) + else: + disallow = None + + sampled_tokens = tf.stop_gradient( + sample_from_softmax(mlm_logits, disallow=disallow)) + sampled_tokids = tf.argmax(sampled_tokens, -1, output_type=tf.int32) + updated_input_ids, masked = scatter_update(inputs['input_word_ids'], + sampled_tokids, + inputs['masked_lm_positions']) + labels = masked * (1 - tf.cast( + tf.equal(updated_input_ids, inputs['input_word_ids']), tf.int32)) + + updated_inputs = get_updated_inputs( + inputs, duplicate, input_word_ids=updated_input_ids) + + return { + 'inputs': updated_inputs, + 'is_fake_tokens': labels, + 'sampled_tokens': sampled_tokens + } + + def get_config(self): + return self._config + + @classmethod + def from_config(cls, config, custom_objects=None): + return cls(**config) + + +def scatter_update(sequence, updates, positions): + """Scatter-update a sequence. + + Args: + sequence: A [batch_size, seq_len] or [batch_size, seq_len, depth] tensor + updates: A tensor of size batch_size*seq_len(*depth) + positions: A [batch_size, n_positions] tensor + + Returns: + updated_sequence: A [batch_size, seq_len] or [batch_size, seq_len, depth] + tensor of "sequence" with elements at "positions" replaced by the values + at "updates". Updates to index 0 are ignored. If there are duplicated + positions the update is only applied once. + updates_mask: A [batch_size, seq_len] mask tensor of which inputs were + updated. 
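As a worked illustration of the scatter_update behavior documented above (the tensors are invented for this example and are not part of the change):

```python
import tensorflow as tf

from official.nlp.modeling.models import electra_pretrainer

sequence = tf.constant([[10, 20, 30],
                        [40, 50, 60]], dtype=tf.int32)
updates = tf.constant([[7], [8]], dtype=tf.int32)
positions = tf.constant([[1], [2]], dtype=tf.int32)

updated, mask = electra_pretrainer.scatter_update(sequence, updates, positions)
# updated -> [[10,  7, 30],
#             [40, 50,  8]]
# mask    -> [[0, 1, 0],
#             [0, 0, 1]]
# An update aimed at position 0 would be dropped by the not_first_token mask.
```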
+ """ + shape = tf_utils.get_shape_list(sequence, expected_rank=[2, 3]) + depth_dimension = (len(shape) == 3) + if depth_dimension: + batch_size, seq_len, depth = shape + else: + batch_size, seq_len = shape + depth = 1 + sequence = tf.expand_dims(sequence, -1) + n_positions = tf_utils.get_shape_list(positions)[1] + + shift = tf.expand_dims(seq_len * tf.range(batch_size), -1) + flat_positions = tf.reshape(positions + shift, [-1, 1]) + flat_updates = tf.reshape(updates, [-1, depth]) + updates = tf.scatter_nd(flat_positions, flat_updates, + [batch_size * seq_len, depth]) + updates = tf.reshape(updates, [batch_size, seq_len, depth]) + + flat_updates_mask = tf.ones([batch_size * n_positions], tf.int32) + updates_mask = tf.scatter_nd(flat_positions, flat_updates_mask, + [batch_size * seq_len]) + updates_mask = tf.reshape(updates_mask, [batch_size, seq_len]) + not_first_token = tf.concat([ + tf.zeros((batch_size, 1), tf.int32), + tf.ones((batch_size, seq_len - 1), tf.int32) + ], -1) + updates_mask *= not_first_token + updates_mask_3d = tf.expand_dims(updates_mask, -1) + + # account for duplicate positions + if sequence.dtype == tf.float32: + updates_mask_3d = tf.cast(updates_mask_3d, tf.float32) + updates /= tf.maximum(1.0, updates_mask_3d) + else: + assert sequence.dtype == tf.int32 + updates = tf.math.floordiv(updates, tf.maximum(1, updates_mask_3d)) + updates_mask = tf.minimum(updates_mask, 1) + updates_mask_3d = tf.minimum(updates_mask_3d, 1) + + updated_sequence = (((1 - updates_mask_3d) * sequence) + + (updates_mask_3d * updates)) + if not depth_dimension: + updated_sequence = tf.squeeze(updated_sequence, -1) + + return updated_sequence, updates_mask + + +def sample_from_softmax(logits, disallow=None): + """Implement softmax sampling using gumbel softmax trick. + + Args: + logits: A [batch_size, num_token_predictions, vocab_size] tensor indicating + the generator output logits for each masked position. + disallow: If `None`, we directly sample tokens from the logits. Otherwise, + this is a tensor of size [batch_size, num_token_predictions, vocab_size] + indicating the true word id in each masked position. + + Returns: + sampled_tokens: A [batch_size, num_token_predictions, vocab_size] one hot + tensor indicating the sampled word id in each masked position. + """ + if disallow is not None: + logits -= 1000.0 * disallow + uniform_noise = tf.random.uniform( + tf_utils.get_shape_list(logits), minval=0, maxval=1) + gumbel_noise = -tf.math.log(-tf.math.log(uniform_noise + 1e-9) + 1e-9) + + # Here we essentially follow the original paper and use temperature 1.0 for + # generator output logits. 
+ sampled_tokens = tf.one_hot( + tf.argmax(tf.nn.softmax(logits + gumbel_noise), -1, output_type=tf.int32), + logits.shape[-1]) + return sampled_tokens + + +def unmask(inputs, duplicate): + unmasked_input_word_ids, _ = scatter_update(inputs['input_word_ids'], + inputs['masked_lm_ids'], + inputs['masked_lm_positions']) + return get_updated_inputs( + inputs, duplicate, input_word_ids=unmasked_input_word_ids) + + +def get_updated_inputs(inputs, duplicate, **kwargs): + if duplicate: + new_inputs = copy.copy(inputs) + else: + new_inputs = inputs + for k, v in kwargs.items(): + new_inputs[k] = v + return new_inputs diff --git a/official/nlp/modeling/models/electra_pretrainer_test.py b/official/nlp/modeling/models/electra_pretrainer_test.py new file mode 100644 index 000000000..b5644ab1a --- /dev/null +++ b/official/nlp/modeling/models/electra_pretrainer_test.py @@ -0,0 +1,156 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Tests for ELECTRA pre trainer network.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import tensorflow as tf + +from tensorflow.python.keras import keras_parameterized # pylint: disable=g-direct-tensorflow-import +from official.nlp.modeling import networks +from official.nlp.modeling.models import electra_pretrainer + + +# This decorator runs the test in V1, V2-Eager, and V2-Functional mode. It +# guarantees forward compatibility of this code for the V2 switchover. +@keras_parameterized.run_all_keras_modes +class ElectraPretrainerTest(keras_parameterized.TestCase): + + def test_electra_pretrainer(self): + """Validate that the Keras object can be created.""" + # Build a transformer network to use within the ELECTRA trainer. + vocab_size = 100 + sequence_length = 512 + test_generator_network = networks.TransformerEncoder( + vocab_size=vocab_size, num_layers=2, sequence_length=sequence_length) + test_discriminator_network = networks.TransformerEncoder( + vocab_size=vocab_size, num_layers=2, sequence_length=sequence_length) + + # Create a ELECTRA trainer with the created network. + num_classes = 3 + num_token_predictions = 2 + eletrca_trainer_model = electra_pretrainer.ElectraPretrainer( + generator_network=test_generator_network, + discriminator_network=test_discriminator_network, + vocab_size=vocab_size, + num_classes=num_classes, + sequence_length=sequence_length, + last_hidden_dim=768, + num_token_predictions=num_token_predictions, + disallow_correct=True) + + # Create a set of 2-dimensional inputs (the first dimension is implicit). 
+ word_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32) + mask = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32) + type_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32) + lm_positions = tf.keras.Input( + shape=(num_token_predictions,), dtype=tf.int32) + lm_ids = tf.keras.Input(shape=(num_token_predictions,), dtype=tf.int32) + inputs = { + 'input_word_ids': word_ids, + 'input_mask': mask, + 'input_type_ids': type_ids, + 'masked_lm_positions': lm_positions, + 'masked_lm_ids': lm_ids + } + + # Invoke the trainer model on the inputs. This causes the layer to be built. + lm_outs, cls_outs, disc_logits, disc_label = eletrca_trainer_model(inputs) + + # Validate that the outputs are of the expected shape. + expected_lm_shape = [None, num_token_predictions, vocab_size] + expected_classification_shape = [None, num_classes] + expected_disc_logits_shape = [None, sequence_length] + expected_disc_label_shape = [None, sequence_length] + self.assertAllEqual(expected_lm_shape, lm_outs.shape.as_list()) + self.assertAllEqual(expected_classification_shape, cls_outs.shape.as_list()) + self.assertAllEqual(expected_disc_logits_shape, disc_logits.shape.as_list()) + self.assertAllEqual(expected_disc_label_shape, disc_label.shape.as_list()) + + def test_electra_trainer_tensor_call(self): + """Validate that the Keras object can be invoked.""" + # Build a transformer network to use within the ELECTRA trainer. (Here, we + # use a short sequence_length for convenience.) + test_generator_network = networks.TransformerEncoder( + vocab_size=100, num_layers=4, sequence_length=3) + test_discriminator_network = networks.TransformerEncoder( + vocab_size=100, num_layers=4, sequence_length=3) + + # Create a ELECTRA trainer with the created network. + eletrca_trainer_model = electra_pretrainer.ElectraPretrainer( + generator_network=test_generator_network, + discriminator_network=test_discriminator_network, + vocab_size=100, + num_classes=2, + sequence_length=3, + last_hidden_dim=768, + num_token_predictions=2) + + # Create a set of 2-dimensional data tensors to feed into the model. + word_ids = tf.constant([[1, 1, 1], [2, 2, 2]], dtype=tf.int32) + mask = tf.constant([[1, 1, 1], [1, 0, 0]], dtype=tf.int32) + type_ids = tf.constant([[1, 1, 1], [2, 2, 2]], dtype=tf.int32) + lm_positions = tf.constant([[0, 1], [0, 2]], dtype=tf.int32) + lm_ids = tf.constant([[10, 20], [20, 30]], dtype=tf.int32) + inputs = { + 'input_word_ids': word_ids, + 'input_mask': mask, + 'input_type_ids': type_ids, + 'masked_lm_positions': lm_positions, + 'masked_lm_ids': lm_ids + } + + # Invoke the trainer model on the tensors. In Eager mode, this does the + # actual calculation. (We can't validate the outputs, since the network is + # too complex: this simply ensures we're not hitting runtime errors.) + _, _, _, _ = eletrca_trainer_model(inputs) + + def test_serialize_deserialize(self): + """Validate that the ELECTRA trainer can be serialized and deserialized.""" + # Build a transformer network to use within the BERT trainer. (Here, we use + # a short sequence_length for convenience.) + test_generator_network = networks.TransformerEncoder( + vocab_size=100, num_layers=4, sequence_length=3) + test_discriminator_network = networks.TransformerEncoder( + vocab_size=100, num_layers=4, sequence_length=3) + + # Create a ELECTRA trainer with the created network. (Note that all the args + # are different, so we can catch any serialization mismatches.) 
+ electra_trainer_model = electra_pretrainer.ElectraPretrainer( + generator_network=test_generator_network, + discriminator_network=test_discriminator_network, + vocab_size=100, + num_classes=2, + sequence_length=3, + last_hidden_dim=768, + num_token_predictions=2) + + # Create another BERT trainer via serialization and deserialization. + config = electra_trainer_model.get_config() + new_electra_trainer_model = electra_pretrainer.ElectraPretrainer.from_config( + config) + + # Validate that the config can be forced to JSON. + _ = new_electra_trainer_model.to_json() + + # If the serialization was successful, the new config should match the old. + self.assertAllEqual(electra_trainer_model.get_config(), + new_electra_trainer_model.get_config()) + + +if __name__ == '__main__': + tf.test.main() diff --git a/official/nlp/modeling/networks/transformer_encoder.py b/official/nlp/modeling/networks/transformer_encoder.py index 6d75f3405..7c6054ddc 100644 --- a/official/nlp/modeling/networks/transformer_encoder.py +++ b/official/nlp/modeling/networks/transformer_encoder.py @@ -60,7 +60,7 @@ class TransformerEncoder(tf.keras.Model): initializer: The initialzer to use for all weights in this encoder. return_all_encoder_outputs: Whether to output sequence embedding outputs of all encoder transformer layers. - output_range: the sequence output range, [0, output_range), by slicing the + output_range: The sequence output range, [0, output_range), by slicing the target sequence of the last transformer layer. `None` means the entire target sequence will attend to the source sequence, which yeilds the full output. @@ -69,6 +69,10 @@ class TransformerEncoder(tf.keras.Model): two matrices in the shape of ['vocab_size', 'embedding_width'] and ['embedding_width', 'hidden_size'] ('embedding_width' is usually much smaller than 'hidden_size'). + embedding_layer: The word embedding layer. `None` means we will create a new + embedding layer. Otherwise, we will reuse the given embedding layer. This + parameter is originally added for ELECTRA model which needs to tie the + generator embeddings with the discriminator embeddings. """ def __init__(self, @@ -87,6 +91,7 @@ class TransformerEncoder(tf.keras.Model): return_all_encoder_outputs=False, output_range=None, embedding_width=None, + embedding_layer=None, **kwargs): activation = tf.keras.activations.get(activation) initializer = tf.keras.initializers.get(initializer) @@ -121,11 +126,14 @@ class TransformerEncoder(tf.keras.Model): if embedding_width is None: embedding_width = hidden_size - self._embedding_layer = layers.OnDeviceEmbedding( - vocab_size=vocab_size, - embedding_width=embedding_width, - initializer=initializer, - name='word_embeddings') + if embedding_layer is None: + self._embedding_layer = layers.OnDeviceEmbedding( + vocab_size=vocab_size, + embedding_width=embedding_width, + initializer=initializer, + name='word_embeddings') + else: + self._embedding_layer = embedding_layer word_embeddings = self._embedding_layer(word_ids) # Always uses dynamic slicing for simplicity. @@ -209,6 +217,9 @@ class TransformerEncoder(tf.keras.Model): def get_embedding_table(self): return self._embedding_layer.embeddings + def get_embedding_layer(self): + return self._embedding_layer + def get_config(self): return self._config_dict -- GitLab From b3e4fefd3116b2b4fcbcba3bf05b5ddb738f1b1d Mon Sep 17 00:00:00 2001 From: Hongkun Yu Date: Fri, 19 Jun 2020 22:06:51 -0700 Subject: [PATCH 36/79] Support numpy-based metrics through Orbit. 
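The mechanism in the diff below is: validation_step returns tensors, aggregate_logs stacks them into NumPy arrays across steps, and reduce_aggregated_logs turns the full arrays into a scalar metric that has no streaming tf.keras equivalent. A small sketch of that final reduction, using the same SciPy and scikit-learn calls the patch relies on; the arrays are invented for illustration:

```python
import numpy as np
from scipy import stats
from sklearn import metrics as sklearn_metrics

# Pretend these were concatenated from per-step validation outputs.
predictions = np.array([1, 0, 1, 1, 0])
labels = np.array([1, 0, 0, 1, 0])
matthews = sklearn_metrics.matthews_corrcoef(predictions, labels)

scores = np.array([0.1, 0.4, 0.35, 0.8])   # regression-style outputs
targets = np.array([0.0, 0.5, 0.3, 1.0])
pearson = stats.pearsonr(scores, targets)[0]
spearman = stats.spearmanr(scores, targets)[0]
corr_metric = (pearson + spearman) / 2.0   # matches the reduction in the patch
```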
PiperOrigin-RevId: 317432167 --- official/core/base_task.py | 8 +++ official/nlp/tasks/sentence_prediction.py | 58 ++++++++++++++++++- .../nlp/tasks/sentence_prediction_test.py | 46 ++++++++++++--- 3 files changed, 102 insertions(+), 10 deletions(-) diff --git a/official/core/base_task.py b/official/core/base_task.py index f5dfdd4f5..31811cbe6 100644 --- a/official/core/base_task.py +++ b/official/core/base_task.py @@ -247,6 +247,14 @@ class Task(tf.Module): """Performs the forward step.""" return model(inputs, training=False) + def aggregate_logs(self, state, step_logs): + """Optional aggregation over logs returned from a validation step.""" + pass + + def reduce_aggregated_logs(self, aggregated_logs): + """Optional reduce of aggregated logs over validation steps.""" + return {} + _REGISTERED_TASK_CLS = {} diff --git a/official/nlp/tasks/sentence_prediction.py b/official/nlp/tasks/sentence_prediction.py index d3d3d0034..4c5b57b99 100644 --- a/official/nlp/tasks/sentence_prediction.py +++ b/official/nlp/tasks/sentence_prediction.py @@ -14,8 +14,11 @@ # limitations under the License. # ============================================================================== """Sentence prediction (classification) task.""" -import logging +from absl import logging import dataclasses +import numpy as np +from scipy import stats +from sklearn import metrics as sklearn_metrics import tensorflow as tf import tensorflow_hub as hub @@ -33,6 +36,7 @@ class SentencePredictionConfig(cfg.TaskConfig): # be specified. init_checkpoint: str = '' hub_module_url: str = '' + metric_type: str = 'accuracy' network: bert.BertPretrainerConfig = bert.BertPretrainerConfig( num_masked_tokens=0, # No masked language modeling head. cls_heads=[ @@ -59,6 +63,7 @@ class SentencePredictionTask(base_task.Task): self._hub_module = hub.load(params.hub_module_url) else: self._hub_module = None + self.metric_type = params.metric_type def build_model(self): if self._hub_module: @@ -123,6 +128,57 @@ class SentencePredictionTask(base_task.Task): def process_compiled_metrics(self, compiled_metrics, labels, model_outputs): compiled_metrics.update_state(labels, model_outputs['sentence_prediction']) + def validation_step(self, inputs, model: tf.keras.Model, metrics=None): + if self.metric_type == 'accuracy': + return super(SentencePredictionTask, + self).validation_step(inputs, model, metrics) + features, labels = inputs + outputs = self.inference_step(features, model) + loss = self.build_losses( + labels=labels, model_outputs=outputs, aux_losses=model.losses) + if self.metric_type == 'matthews_corrcoef': + return { + self.loss: + loss, + 'sentence_prediction': + tf.expand_dims( + tf.math.argmax(outputs['sentence_prediction'], axis=1), + axis=0), + 'labels': + labels, + } + if self.metric_type == 'pearson_spearman_corr': + return { + self.loss: loss, + 'sentence_prediction': outputs['sentence_prediction'], + 'labels': labels, + } + + def aggregate_logs(self, state=None, step_outputs=None): + if state is None: + state = {'sentence_prediction': [], 'labels': []} + state['sentence_prediction'].append( + np.concatenate([v.numpy() for v in step_outputs['sentence_prediction']], + axis=0)) + state['labels'].append( + np.concatenate([v.numpy() for v in step_outputs['labels']], axis=0)) + return state + + def reduce_aggregated_logs(self, aggregated_logs): + if self.metric_type == 'matthews_corrcoef': + preds = np.concatenate(aggregated_logs['sentence_prediction'], axis=0) + labels = np.concatenate(aggregated_logs['labels'], axis=0) + return { + 
self.metric_type: sklearn_metrics.matthews_corrcoef(preds, labels) + } + if self.metric_type == 'pearson_spearman_corr': + preds = np.concatenate(aggregated_logs['sentence_prediction'], axis=0) + labels = np.concatenate(aggregated_logs['labels'], axis=0) + pearson_corr = stats.pearsonr(preds, labels)[0] + spearman_corr = stats.spearmanr(preds, labels)[0] + corr_metric = (pearson_corr + spearman_corr) / 2 + return {self.metric_type: corr_metric} + def initialize(self, model): """Load a pretrained checkpoint (if exists) and then train from iter 0.""" ckpt_dir_or_file = self.task_config.init_checkpoint diff --git a/official/nlp/tasks/sentence_prediction_test.py b/official/nlp/tasks/sentence_prediction_test.py index fc7676333..09419f54c 100644 --- a/official/nlp/tasks/sentence_prediction_test.py +++ b/official/nlp/tasks/sentence_prediction_test.py @@ -16,6 +16,8 @@ """Tests for official.nlp.tasks.sentence_prediction.""" import functools import os + +from absl.testing import parameterized import tensorflow as tf from official.nlp.bert import configs @@ -25,20 +27,24 @@ from official.nlp.configs import encoders from official.nlp.tasks import sentence_prediction -class SentencePredictionTaskTest(tf.test.TestCase): +class SentencePredictionTaskTest(tf.test.TestCase, parameterized.TestCase): def setUp(self): super(SentencePredictionTaskTest, self).setUp() - self._network_config = bert.BertPretrainerConfig( + self._train_data_config = bert.SentencePredictionDataConfig( + input_path="dummy", seq_length=128, global_batch_size=1) + + def get_network_config(self, num_classes): + return bert.BertPretrainerConfig( encoder=encoders.TransformerEncoderConfig( vocab_size=30522, num_layers=1), num_masked_tokens=0, cls_heads=[ bert.ClsHeadConfig( - inner_dim=10, num_classes=3, name="sentence_prediction") + inner_dim=10, + num_classes=num_classes, + name="sentence_prediction") ]) - self._train_data_config = bert.SentencePredictionDataConfig( - input_path="dummy", seq_length=128, global_batch_size=1) def _run_task(self, config): task = sentence_prediction.SentencePredictionTask(config) @@ -57,7 +63,7 @@ class SentencePredictionTaskTest(tf.test.TestCase): def test_task(self): config = sentence_prediction.SentencePredictionConfig( init_checkpoint=self.get_temp_dir(), - network=self._network_config, + network=self.get_network_config(2), train_data=self._train_data_config) task = sentence_prediction.SentencePredictionTask(config) model = task.build_model() @@ -84,12 +90,34 @@ class SentencePredictionTaskTest(tf.test.TestCase): ckpt.save(config.init_checkpoint) task.initialize(model) - def test_task_with_fit(self): + @parameterized.parameters(("matthews_corrcoef", 2), + ("pearson_spearman_corr", 1)) + def test_np_metrics(self, metric_type, num_classes): config = sentence_prediction.SentencePredictionConfig( - network=self._network_config, + metric_type=metric_type, + init_checkpoint=self.get_temp_dir(), + network=self.get_network_config(num_classes), train_data=self._train_data_config) task = sentence_prediction.SentencePredictionTask(config) model = task.build_model() + dataset = task.build_inputs(config.train_data) + + iterator = iter(dataset) + strategy = tf.distribute.get_strategy() + distributed_outputs = strategy.run( + functools.partial(task.validation_step, model=model), + args=(next(iterator),)) + outputs = tf.nest.map_structure(strategy.experimental_local_results, + distributed_outputs) + aggregated = task.aggregate_logs(step_outputs=outputs) + aggregated = task.aggregate_logs(state=aggregated, 
step_outputs=outputs) + self.assertIn(metric_type, task.reduce_aggregated_logs(aggregated)) + + def test_task_with_fit(self): + config = sentence_prediction.SentencePredictionConfig( + network=self.get_network_config(2), train_data=self._train_data_config) + task = sentence_prediction.SentencePredictionTask(config) + model = task.build_model() model = task.compile_model( model, optimizer=tf.keras.optimizers.SGD(lr=0.1), @@ -126,7 +154,7 @@ class SentencePredictionTaskTest(tf.test.TestCase): hub_module_url = self._export_bert_tfhub() config = sentence_prediction.SentencePredictionConfig( hub_module_url=hub_module_url, - network=self._network_config, + network=self.get_network_config(2), train_data=self._train_data_config) self._run_task(config) -- GitLab From ae3b0a676647cb75fdca5c74987ab6c998384c17 Mon Sep 17 00:00:00 2001 From: Naveenkhasyap Date: Sun, 21 Jun 2020 08:49:37 +0530 Subject: [PATCH 37/79] #7532 Adding model save support for Sentiment Analysis model (#7715) * #7532 Adding model save option for Sentiment model * Updated the code as per review comments. --- research/sentiment_analysis/sentiment_main.py | 29 +++++++++++++++++-- 1 file changed, 26 insertions(+), 3 deletions(-) diff --git a/research/sentiment_analysis/sentiment_main.py b/research/sentiment_analysis/sentiment_main.py index a749d25f3..8b9ba5f92 100644 --- a/research/sentiment_analysis/sentiment_main.py +++ b/research/sentiment_analysis/sentiment_main.py @@ -10,17 +10,20 @@ from __future__ import division from __future__ import print_function import argparse +import os import tensorflow as tf from data import dataset import sentiment_model + + _DROPOUT_RATE = 0.95 def run_model(dataset_name, emb_dim, voc_size, sen_len, - hid_dim, batch_size, epochs): + hid_dim, batch_size, epochs, model_save_dir): """Run training loop and an evaluation at the end. 
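The change below writes two kinds of artifacts from run_model: per-epoch weight checkpoints from a ModelCheckpoint callback (save_weights_only=True) and a final full model written with model.save(). A hedged sketch of restoring them afterwards; the directory and epoch number are illustrative, and custom_objects may be needed if the model uses non-standard layers:

```python
import os
import tensorflow as tf

model_save_dir = "/tmp/sentiment_model"  # illustrative path

# Weight-only checkpoints: rebuild the same architecture with the code in
# sentiment_model.py first, then restore the weights into it, e.g.
# model.load_weights(os.path.join(model_save_dir, "model-05.hdf5"))

# The final model saved via model.save() can be reloaded wholesale.
full_model = tf.keras.models.load_model(
    os.path.join(model_save_dir, "full-model.h5"))
```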
Args: @@ -48,9 +51,23 @@ def run_model(dataset_name, emb_dim, voc_size, sen_len, x_train, y_train, x_test, y_test = dataset.load( dataset_name, voc_size, sen_len) + if not os.path.exists(model_save_dir): + os.makedirs(model_save_dir) + + filepath=model_save_dir+"/model-{epoch:02d}.hdf5" + + checkpoint_callback = tf.keras.callbacks.ModelCheckpoint(filepath, monitor='val_accuracy', + verbose=1,save_best_only=True, + save_weights_only=True,mode='auto') + + model.fit(x_train, y_train, batch_size=batch_size, - validation_split=0.4, epochs=epochs) + validation_split=0.4, epochs=epochs, callbacks=[checkpoint_callback]) + score = model.evaluate(x_test, y_test, batch_size=batch_size) + + model.save(os.path.join(model_save_dir, "full-model.h5")) + tf.logging.info("Score: {}".format(score)) if __name__ == "__main__": @@ -85,8 +102,14 @@ if __name__ == "__main__": help="The number of epochs for training.", type=int, default=55) + parser.add_argument("-f", "--folder", + help="folder/dir to save trained model", + type=str, default=None) args = parser.parse_args() + if args.folder is None: + parser.error("-f argument folder/dir to save is None,provide path to save model.") + run_model(args.dataset, args.embedding_dim, args.vocabulary_size, args.sentence_length, args.hidden_dim, - args.batch_size, args.epochs) + args.batch_size, args.epochs, args.folder) -- GitLab From 6e4f52c654e0b92eba418cdc42b392a651ec0c88 Mon Sep 17 00:00:00 2001 From: Jaeyoun Kim Date: Sun, 21 Jun 2020 12:55:01 -0700 Subject: [PATCH 38/79] Update README.md Add conference info --- research/README.md | 142 ++++++++++++++++++++++----------------------- 1 file changed, 71 insertions(+), 71 deletions(-) diff --git a/research/README.md b/research/README.md index 9cebb9118..f9e84fb86 100644 --- a/research/README.md +++ b/research/README.md @@ -20,49 +20,49 @@ The research models are maintained by their respective authors. | Directory | Name | Description | Maintainer(s) | |-----------|------|-------------|---------------| -| [object_detection](object_detection) | TensorFlow Object Detection API | A framework that makes it easy to construct, train and deploy object detection models

    A collection of object detection models pre-trained on the COCO dataset, the Kitti dataset, the Open Images dataset, the AVA v2.1 dataset, and the iNaturalist Species Detection Dataset| @jch1, @tombstone, @pkulzc | -| [slim](slim) | TensorFlow-Slim Image Classification Model Library | A lightweight high-level API of TensorFlow for defining, training and evaluating image classification models
    • Inception V1/V2/V3/V4
    • Inception-ResNet-v2
    • ResNet V1/V2
    • VGG 16/19
    • MobileNet V1/V2/V3
    • NASNet-A_Mobile/Large
    • PNASNet-5_Large/Mobile | @sguada, @marksandler2 | +| [object_detection](object_detection) | TensorFlow Object Detection API | A framework that makes it easy to construct, train and deploy object detection models

    A collection of object detection models pre-trained on the COCO dataset, the Kitti dataset, the Open Images dataset, the AVA v2.1 dataset, and the iNaturalist Species Detection Dataset| jch1, tombstone, pkulzc | +| [slim](slim) | TensorFlow-Slim Image Classification Model Library | A lightweight high-level API of TensorFlow for defining, training and evaluating image classification models
    • Inception V1/V2/V3/V4
    • Inception-ResNet-v2
    • ResNet V1/V2
    • VGG 16/19
    • MobileNet V1/V2/V3
    • NASNet-A_Mobile/Large
    • PNASNet-5_Large/Mobile | sguada, marksandler2 | ## Models and Implementations ### Computer Vision -| Directory | Referenece (Paper) | Maintainer(s) | -|-----------|--------------------|---------------| -| [attention_ocr](attention_ocr) | [Attention-based Extraction of Structured Information from Street View Imagery](https://arxiv.org/abs/1704.03549) | xavigibert | -| [autoaugment](autoaugment) | [1] [AutoAugment](https://arxiv.org/abs/1805.09501)
    [2] [Wide Residual Networks](https://arxiv.org/abs/1605.07146)
    [3] [Shake-Shake regularization](https://arxiv.org/abs/1705.07485)
    [4] [ShakeDrop Regularization for Deep Residual Learning](https://arxiv.org/abs/1802.02375) | barretzoph | -| [deeplab](deeplab) | [1] [DeepLabv1](https://arxiv.org/abs/1412.7062)
    [2] [DeepLabv2](https://arxiv.org/abs/1606.00915)
    [3] [DeepLabv3](https://arxiv.org/abs/1802.02611)
    [4] [DeepLabv3+](https://arxiv.org/abs/1706.05587) | aquariusjay, yknzhu | -| [delf](delf) | [1] DELF (DEep Local Features): [Large-Scale Image Retrieval with Attentive Deep Local Features](https://arxiv.org/abs/1612.06321)
    [2] [Detect-to-Retrieve](https://arxiv.org/abs/1812.01584) | andrefaraujo | -| [lstm_object_detection](lstm_object_detection) | [Mobile Video Object Detection with Temporally-Aware Feature Maps](https://arxiv.org/abs/1711.06368) | yinxiaoli, yongzhe2160, lzyuan | -| [marco](marco) | [Classification of crystallization outcomes using deep convolutional neural networks](https://arxiv.org/abs/1803.10342) | vincentvanhoucke | -| [vid2depth](vid2depth) | [Unsupervised Learning of Depth and Ego-Motion from Monocular Video Using 3D Geometric Constraints](https://arxiv.org/abs/1802.05522) | rezama | +| Directory | Paper(s) | Conference | Maintainer(s) | +|-----------|----------|------------|---------------| +| [attention_ocr](attention_ocr) | [Attention-based Extraction of Structured Information from Street View Imagery](https://arxiv.org/abs/1704.03549) | ICDAR 2017 | xavigibert | +| [autoaugment](autoaugment) | [1] [AutoAugment](https://arxiv.org/abs/1805.09501)
    [2] [Wide Residual Networks](https://arxiv.org/abs/1605.07146)
    [3] [Shake-Shake regularization](https://arxiv.org/abs/1705.07485)
    [4] [ShakeDrop Regularization for Deep Residual Learning](https://arxiv.org/abs/1802.02375) | [1] CVPR 2019
    [2] BMVC 2016
    [3] ICLR 2017
    [4] ICLR 2018 | barretzoph | +| [deeplab](deeplab) | [1] [DeepLabv1: Semantic Image Segmentation with Deep Convolutional Nets and Fully Connected CRFs](https://arxiv.org/abs/1412.7062)
    [2] [DeepLabv2: Semantic Image Segmentation with Deep Convolutional Nets, Atrous Convolution, and Fully Connected CRFs](https://arxiv.org/abs/1606.00915)
    [3] [DeepLabv3: Rethinking Atrous Convolution for Semantic Image Segmentation](https://arxiv.org/abs/1706.05587)
    [4] [DeepLabv3+: Encoder-Decoder with Atrous Separable Convolution for Semantic Image Segmentation](https://arxiv.org/abs/1802.02611)
    | [1] ICLR 2015
    [2] TPAMI 2017
    [4] ECCV 2018 | aquariusjay, yknzhu | +| [delf](delf) | [1] DELF (DEep Local Features): [Large-Scale Image Retrieval with Attentive Deep Local Features](https://arxiv.org/abs/1612.06321)
    [2] [Detect-to-Retrieve: Efficient Regional Aggregation for Image Search](https://arxiv.org/abs/1812.01584)
    [3] DELG (DEep Local and Global features): [Unifying Deep Local and Global Features for Image Search](https://arxiv.org/abs/2001.05027)
    [4] GLDv2: [Google Landmarks Dataset v2 -- A Large-Scale Benchmark for Instance-Level Recognition and Retrieval](https://arxiv.org/abs/2004.01804) | [1] ICCV 2017
    [2] CVPR 2019
    [4] CVPR 2020 | andrefaraujo | +| [lstm_object_detection](lstm_object_detection) | [Mobile Video Object Detection with Temporally-Aware Feature Maps](https://arxiv.org/abs/1711.06368) | CVPR 2018 | yinxiaoli, yongzhe2160, lzyuan | +| [marco](marco) | MARCO: [Classification of crystallization outcomes using deep convolutional neural networks](https://arxiv.org/abs/1803.10342) | | vincentvanhoucke | +| [vid2depth](vid2depth) | [Unsupervised Learning of Depth and Ego-Motion from Monocular Video Using 3D Geometric Constraints](https://arxiv.org/abs/1802.05522) | CVPR 2018 | rezama | ### Natural Language Processing -| Directory | Referenece (Paper) | Maintainer(s) | -|-----------|--------------------|---------------| -| [adversarial_text](adversarial_text) | [1] [Adversarial Training Methods for Semi-Supervised Text](https://arxiv.org/abs/1605.07725) Classification
    [2] [Semi-supervised Sequence Learning](https://arxiv.org/abs/1511.01432) | rsepassi, a-dai | -| [cvt_text](cvt_text) | [Semi-supervised sequence learning with cross-view training](https://arxiv.org/abs/1809.08370) | clarkkev, lmthang | +| Directory | Paper(s) | Conference | Maintainer(s) | +|-----------|----------|------------|---------------| +| [adversarial_text](adversarial_text) | [1] [Adversarial Training Methods for Semi-Supervised Text](https://arxiv.org/abs/1605.07725) Classification
    [2] [Semi-supervised Sequence Learning](https://arxiv.org/abs/1511.01432) | [1] ICLR 2017
    [2] NIPS 2015 | rsepassi, a-dai | +| [cvt_text](cvt_text) | [Semi-Supervised Sequence Modeling with Cross-View Training](https://arxiv.org/abs/1809.08370) | EMNLP 2018 | clarkkev, lmthang | ### Audio and Speech -| Directory | Referenece (Paper) | Maintainer(s) | -|-----------|--------------------|---------------| -| [audioset](audioset) | [1] [AudioSet: A Large Scale Dataset of Audio Events](https://research.google/pubs/pub45857/)
    [2] [CNN Architectures for Large-Scale Audio Classification](https://research.google/pubs/pub45611/) | plakal, dpwe | +| Directory | Paper(s) | Conference | Maintainer(s) | +|-----------|----------|------------|---------------| +| [audioset](audioset) | [1] [Audio Set: An ontology and human-labeled dataset for audio events](https://research.google/pubs/pub45857/)
    [2] [CNN Architectures for Large-Scale Audio Classification](https://research.google/pubs/pub45611/) | ICASSP 2017 | plakal, dpwe | ### Reinforcement Learning -| Directory | Referenece (Paper) | Maintainer(s) | -|-----------|--------------------|---------------| -| [efficient-hrl](efficient-hrl) | [1] [Data-Efficient Hierarchical Reinforcement Learning](https://arxiv.org/abs/1805.08296)
    [2] [Near-Optimal Representation Learning for Hierarchical Reinforcement Learning](https://arxiv.org/abs/1810.01257) | ofirnachum | -| [pcl_rl](pcl_rl) | [1] [Improving Policy Gradient by Exploring Under-appreciated Rewards](https://arxiv.org/abs/1611.09321)
    [2] [Bridging the Gap Between Value and Policy Based Reinforcement Learning](https://arxiv.org/abs/1702.08892)
    [3] [Trust-PCL: An Off-Policy Trust Region Method for Continuous Control](https://arxiv.org/abs/1707.01891) | ofirnachum | +| Directory | Paper(s) | Conference | Maintainer(s) | +|-----------|----------|------------|---------------| +| [efficient-hrl](efficient-hrl) | [1] [Data-Efficient Hierarchical Reinforcement Learning](https://arxiv.org/abs/1805.08296)
    [2] [Near-Optimal Representation Learning for Hierarchical Reinforcement Learning](https://arxiv.org/abs/1810.01257) | [1] NIPS 2018
    [2] ICLR 2019 | ofirnachum | +| [pcl_rl](pcl_rl) | [1] [Improving Policy Gradient by Exploring Under-appreciated Rewards](https://arxiv.org/abs/1611.09321)
    [2] [Bridging the Gap Between Value and Policy Based Reinforcement Learning](https://arxiv.org/abs/1702.08892)
    [3] [Trust-PCL: An Off-Policy Trust Region Method for Continuous Control](https://arxiv.org/abs/1707.01891) | [1] ICLR 2017
    [2] NIPS 2017
    [3] ICLR 2018 | ofirnachum | ### Others -| Directory | Referenece (Paper) | Maintainer(s) | -|-----------|--------------------|---------------| -| [lfads](lfads) | [LFADS - Latent Factor Analysis via Dynamical Systems](https://doi.org/10.1101/152884) | jazcollins, sussillo | -| [rebar](rebar) | [REBAR: Low-variance, unbiased gradient estimates for discrete latent variable models](https://arxiv.org/abs/1703.07370) | gjtucker | +| Directory | Paper(s) | Conference | Maintainer(s) | +|-----------|----------|------------|---------------| +| [lfads](lfads) | [LFADS - Latent Factor Analysis via Dynamical Systems](https://arxiv.org/abs/1608.06315) | | jazcollins, sussillo | +| [rebar](rebar) | [REBAR: Low-variance, unbiased gradient estimates for discrete latent variable models](https://arxiv.org/abs/1703.07370) | NIPS 2017 | gjtucker | --- @@ -70,55 +70,55 @@ The research models are maintained by their respective authors. The following research models are no longer maintained. -**Note**: We will remove archived models from the master branch in June, 2020. +**Note**: We will remove archived models from the master branch in June, 2020. After removal, you will still be able to access archived models in the archive branch. -| Directory | Referenece (Paper) | Maintainer(s) | -|-----------|--------------------|---------------| -| [adv_imagenet_models](adv_imagenet_models) | [1] [Adversarial Machine Learning at Scale](https://arxiv.org/abs/1611.01236)
    [2] [Ensemble Adversarial Training: Attacks and Defenses](https://arxiv.org/abs/1705.07204) | alexeykurakin | -| [adversarial_crypto](adversarial_crypto) | [Learning to Protect Communications with Adversarial Neural Cryptography](https://arxiv.org/abs/1610.06918) | dave-andersen | -| [adversarial_logit_pairing](adversarial_logit_pairing) | [Adversarial Logit Pairing](https://arxiv.org/abs/1803.06373) | alexeykurakin | -| [autoencoder](autoencoder) | Various autoencoders | snurkabill | -| [brain_coder](brain_coder) | [Neural Program Synthesis with Priority Queue Training](https://arxiv.org/abs/1801.03526) | danabo, mnorouzi | -| [cognitive_mapping_and_planning](cognitive_mapping_and_planning) | [Cognitive Mapping and Planning for Visual Navigation](https://arxiv.org/abs/1702.03920) | s-gupta | -| [compression](compression) | [Full Resolution Image Compression with Recurrent Neural Networks](https://arxiv.org/abs/1608.05148) | nmjohn | -| [deep_contextual_bandits](deep_contextual_bandits) | [Deep Bayesian Bandits Showdown: An Empirical Comparison of Bayesian Deep Networks for Thompson Sampling](https://arxiv.org/abs/1802.09127) | rikel | -| [deep_speech](deep_speech) | [Deep Speech 2](https://arxiv.org/abs/1512.02595) | yhliang2018 | -| [domain_adaptation](domain_adaptation) | [1] [Domain Separation Networks](https://arxiv.org/abs/1608.06019)
    [2] [Unsupervised Pixel-Level Domain Adaptation with Generative Adversarial Networks](https://arxiv.org/abs/1612.05424) | bousmalis, dmrd | -| [feelvos](feelvos)| [FEELVOS](https://arxiv.org/abs/1902.09513) | pvoigtlaender, yuningchai, aquariusjay | -| [fivo](fivo)| [Filtering variational objectives for training generative sequence models](https://arxiv.org/abs/1705.09279) | dieterichlawson | -| [global_objectives](global_objectives) | [Scalable Learning of Non-Decomposable Objectives](https://arxiv.org/abs/1608.04802) | mackeya-google | -| [im2txt](im2txt) | [Show and Tell: Lessons learned from the 2015 MSCOCO Image Captioning Challenge](https://arxiv.org/abs/1609.06647) | cshallue | -| [inception](inception) | [Rethinking the Inception Architecture for Computer Vision](https://arxiv.org/abs/1512.00567) | shlens, vincentvanhoucke | -| [keypointnet](keypointnet) | [KeypointNet](https://arxiv.org/abs/1807.03146) | mnorouzi | -| [learned_optimizer](learned_optimizer) | [Learned Optimizers that Scale and Generalize](https://arxiv.org/abs/1703.04813) | olganw, nirum | -| [learning_to_remember_rare_events](learning_to_remember_rare_events) | [Learning to Remember Rare Events](https://arxiv.org/abs/1703.03129) | lukaszkaiser, ofirnachum | -| [learning_unsupervised_learning](learning_unsupervised_learning) | [Meta-Learning Update Rules for Unsupervised Representation Learning](https://arxiv.org/abs/1804.00222) | lukemetz, nirum | -| [lexnet_nc](lexnet_nc) | [Olive Oil is Made of Olives, Baby Oil is Made for Babies: Interpreting Noun Compounds using Paraphrases in a Neural Model](https://arxiv.org/abs/1803.08073) | vered1986, waterson | -| [lm_1b](lm_1b) | [Exploring the Limits of Language Modeling](https://arxiv.org/abs/1602.02410) | oriolvinyals, panyx0718 | -| [lm_commonsense](lm_commonsense) | [A Simple Method for Commonsense Reasoning](https://arxiv.org/abs/1806.02847) | thtrieu | -| [maskgan](maskgan)| [MaskGAN: Better Text Generation via Filling in the______](https://arxiv.org/abs/1801.07736) | liamb315, a-dai | -| [namignizer](namignizer)| Namignizer | knathanieltucker | -| [neural_gpu](neural_gpu)| [Neural GPUs Learn Algorithms](https://arxiv.org/abs/1511.08228) | lukaszkaiser | -| [neural_programmer](neural_programmer) | [Learning a Natural Language Interface with Neural Programmer](https://arxiv.org/abs/1611.08945) | arvind2505 | -| [next_frame_prediction](next_frame_prediction) | [Visual Dynamics](https://arxiv.org/abs/1607.02586) | panyx0718 | -| [ptn](ptn) | [Perspective Transformer Nets](https://arxiv.org/abs/1612.00814) | xcyan, arkanath, hellojas, honglaklee | -| [qa_kg](qa_kg) | [Learning to Reason](https://arxiv.org/abs/1704.05526) | yuyuz | -| [real_nvp](real_nvp) | [Density estimation using Real NVP](https://arxiv.org/abs/1605.08803) | laurent-dinh | -| [sentiment_analysis](sentiment_analysis)| [Effective Use of Word Order for Text Categorization with Convolutional Neural Networks](https://arxiv.org/abs/1412.1058) | sculd | -| [seq2species](seq2species) | [Seq2Species: A deep learning approach to pattern recognition for short DNA sequences](https://doi.org/10.1101/353474) | apbusia, depristo | -| [skip_thoughts](skip_thoughts) | [Skip-Thought Vectors](https://arxiv.org/abs/1506.06726) | cshallue | -| [steve](steve) | [Sample-Efficient Reinforcement Learning with Stochastic Ensemble Value Expansion](https://arxiv.org/abs/1807.01675) | buckman-google | -| [street](street) | [End-to-End Interpretation of the French Street Name Signs Dataset](https://arxiv.org/abs/1702.03970) 
| theraysmith | -| [struct2depth](struct2depth)| [Depth Prediction Without the Sensors: Leveraging Structure for Unsupervised Learning from Monocular Videos](https://arxiv.org/abs/1811.06152) | aneliaangelova | -| [swivel](swivel) | [Swivel: Improving Embeddings by Noticing What's Missing](https://arxiv.org/abs/1602.02215) | waterson | -| [tcn](tcn) | [Time-Contrastive Networks: Self-Supervised Learning from Video](https://arxiv.org/abs/1704.06888) | coreylynch, sermanet | -| [textsum](textsum)| [A Neural Attention Model for Abstractive Sentence Summarization](https://arxiv.org/abs/1509.00685) | panyx0718, peterjliu | -| [transformer](transformer) | [Spatial Transformer Network](https://arxiv.org/abs/1506.02025) | daviddao| -| [video_prediction](video_prediction) | [Unsupervised Learning for Physical Interaction through Video Prediction](https://arxiv.org/abs/1605.07157) | cbfinn | +| Directory | Paper(s) | Conference | Maintainer(s) | +|-----------|----------|------------|---------------| +| [adv_imagenet_models](adv_imagenet_models) | [1] [Adversarial Machine Learning at Scale](https://arxiv.org/abs/1611.01236)
    [2] [Ensemble Adversarial Training: Attacks and Defenses](https://arxiv.org/abs/1705.07204) | [1] ICLR 2017
    [2] ICLR 2018 | alexeykurakin | +| [adversarial_crypto](adversarial_crypto) | [Learning to Protect Communications with Adversarial Neural Cryptography](https://arxiv.org/abs/1610.06918) | | dave-andersen | +| [adversarial_logit_pairing](adversarial_logit_pairing) | [Adversarial Logit Pairing](https://arxiv.org/abs/1803.06373) | | alexeykurakin | +| [autoencoder](autoencoder) | Various autoencoders | | snurkabill | +| [brain_coder](brain_coder) | [Neural Program Synthesis with Priority Queue Training](https://arxiv.org/abs/1801.03526) | | danabo, mnorouzi | +| [cognitive_mapping_and_planning](cognitive_mapping_and_planning) | [Cognitive Mapping and Planning for Visual Navigation](https://arxiv.org/abs/1702.03920) | CVPR 2017 | s-gupta | +| [compression](compression) | [Full Resolution Image Compression with Recurrent Neural Networks](https://arxiv.org/abs/1608.05148) | CVPR 2017 | nmjohn | +| [deep_contextual_bandits](deep_contextual_bandits) | [Deep Bayesian Bandits Showdown: An Empirical Comparison of Bayesian Deep Networks for Thompson Sampling](https://arxiv.org/abs/1802.09127) | ICLR 2018 | rikel | +| [deep_speech](deep_speech) | [Deep Speech 2](https://arxiv.org/abs/1512.02595) | ICLR 2016 | yhliang2018 | +| [domain_adaptation](domain_adaptation) | [1] [Domain Separation Networks](https://arxiv.org/abs/1608.06019)
    [2] [Unsupervised Pixel-Level Domain Adaptation with Generative Adversarial Networks](https://arxiv.org/abs/1612.05424) | NIPS 2016 | bousmalis, dmrd | +| [feelvos](feelvos)| [FEELVOS](https://arxiv.org/abs/1902.09513) | CVPR 2019 | pvoigtlaender, yuningchai, aquariusjay | +| [fivo](fivo)| [Filtering variational objectives for training generative sequence models](https://arxiv.org/abs/1705.09279) | NIPS 2017 | dieterichlawson | +| [global_objectives](global_objectives) | [Scalable Learning of Non-Decomposable Objectives](https://arxiv.org/abs/1608.04802) | AISTATS 2017 | mackeya-google | +| [im2txt](im2txt) | [Show and Tell: Lessons learned from the 2015 MSCOCO Image Captioning Challenge](https://arxiv.org/abs/1609.06647) | TPAMI 2016 | cshallue | +| [inception](inception) | [Rethinking the Inception Architecture for Computer Vision](https://arxiv.org/abs/1512.00567) | CVPR 2016 | shlens, vincentvanhoucke | +| [keypointnet](keypointnet) | [KeypointNet](https://arxiv.org/abs/1807.03146) | | mnorouzi | +| [learned_optimizer](learned_optimizer) | [Learned Optimizers that Scale and Generalize](https://arxiv.org/abs/1703.04813) | ICML 2017 | olganw, nirum | +| [learning_to_remember_rare_events](learning_to_remember_rare_events) | [Learning to Remember Rare Events](https://arxiv.org/abs/1703.03129) | ICLR 2017| lukaszkaiser, ofirnachum | +| [learning_unsupervised_learning](learning_unsupervised_learning) | [Meta-Learning Update Rules for Unsupervised Representation Learning](https://arxiv.org/abs/1804.00222) | ICLR 2019 | lukemetz, nirum | +| [lexnet_nc](lexnet_nc) | [Olive Oil is Made of Olives, Baby Oil is Made for Babies: Interpreting Noun Compounds using Paraphrases in a Neural Model](https://arxiv.org/abs/1803.08073) | NAACL 2018 | vered1986, waterson | +| [lm_1b](lm_1b) | [Exploring the Limits of Language Modeling](https://arxiv.org/abs/1602.02410) | | oriolvinyals, panyx0718 | +| [lm_commonsense](lm_commonsense) | [A Simple Method for Commonsense Reasoning](https://arxiv.org/abs/1806.02847) | | thtrieu | +| [maskgan](maskgan)| [MaskGAN: Better Text Generation via Filling in the](https://arxiv.org/abs/1801.07736) | ICLR 2018 | liamb315, a-dai | +| [namignizer](namignizer)| Namignizer | | knathanieltucker | +| [neural_gpu](neural_gpu)| [Neural GPUs Learn Algorithms](https://arxiv.org/abs/1511.08228) | | lukaszkaiser | +| [neural_programmer](neural_programmer) | [Learning a Natural Language Interface with Neural Programmer](https://arxiv.org/abs/1611.08945) | ICLR 2017 | arvind2505 | +| [next_frame_prediction](next_frame_prediction) | [Visual Dynamics: Probabilistic Future Frame Synthesis via Cross Convolutional Networks](https://arxiv.org/abs/1607.02586) | NIPS 2016 | panyx0718 | +| [ptn](ptn) | [Perspective Transformer Nets: Learning Single-View 3D Object Reconstruction without 3D Supervision](https://arxiv.org/abs/1612.00814) | NIPS 2016 | xcyan, arkanath, hellojas, honglaklee | +| [qa_kg](qa_kg) | [Learning to Reason: End-to-End Module Networks for Visual Question Answering](https://arxiv.org/abs/1704.05526) | ICCV 2017 | yuyuz | +| [real_nvp](real_nvp) | [Density estimation using Real NVP](https://arxiv.org/abs/1605.08803) | ICLR 2017 | laurent-dinh | +| [sentiment_analysis](sentiment_analysis)| [Effective Use of Word Order for Text Categorization with Convolutional Neural Networks](https://arxiv.org/abs/1412.1058) | NAACL HLT 2015 | sculd | +| [seq2species](seq2species) | [Seq2Species: A deep learning approach to pattern recognition for short DNA 
sequences](https://doi.org/10.1101/353474) | | apbusia, depristo | +| [skip_thoughts](skip_thoughts) | [Skip-Thought Vectors](https://arxiv.org/abs/1506.06726) | | cshallue | +| [steve](steve) | [Sample-Efficient Reinforcement Learning with Stochastic Ensemble Value Expansion](https://arxiv.org/abs/1807.01675) | NeurIPS 2018 | buckman-google | +| [street](street) | [End-to-End Interpretation of the French Street Name Signs Dataset](https://arxiv.org/abs/1702.03970) | ECCV 2016 | theraysmith | +| [struct2depth](struct2depth)| [Depth Prediction Without the Sensors: Leveraging Structure for Unsupervised Learning from Monocular Videos](https://arxiv.org/abs/1811.06152) | AAAI 2019 | aneliaangelova | +| [swivel](swivel) | [Swivel: Improving Embeddings by Noticing What's Missing](https://arxiv.org/abs/1602.02215) | | waterson | +| [tcn](tcn) | [Time-Contrastive Networks: Self-Supervised Learning from Video](https://arxiv.org/abs/1704.06888) | ICRA 2018 | coreylynch, sermanet | +| [textsum](textsum)| [A Neural Attention Model for Abstractive Sentence Summarization](https://arxiv.org/abs/1509.00685) | EMNLP 2015 | panyx0718, peterjliu | +| [transformer](transformer) | [Spatial Transformer Network](https://arxiv.org/abs/1506.02025) | NIPS 2015 | daviddao| +| [video_prediction](video_prediction) | [Unsupervised Learning for Physical Interaction through Video Prediction](https://arxiv.org/abs/1605.07157) | NIPS 2016 | cbfinn | --- ## Contributions -If you want to contribute, please review the [contribution guidelines](../../../wiki/How-to-contribute). +If you want to contribute, please review the [contribution guidelines](https://github.com/tensorflow/models/wiki/How-to-contribute). -- GitLab From eb355ec0801fe59600c53e2302c3e1b81ba7e9c0 Mon Sep 17 00:00:00 2001 From: Jaeyoun Kim Date: Sun, 21 Jun 2020 13:02:39 -0700 Subject: [PATCH 39/79] Update README.md Update links --- community/README.md | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/community/README.md b/community/README.md index eea11fc2b..c399c31ef 100644 --- a/community/README.md +++ b/community/README.md @@ -6,13 +6,12 @@ This repository provides a curated list of the GitHub repositories with machine **Note**: Contributing companies or individuals are responsible for maintaining their repositories. -## Models / Implementations +## Computer Vision -### Computer Vision +### Image Recognition -#### Image Recognition -| Model | Reference (Paper) | Features | Maintainer | -|-------|-------------------|----------|------------| +| Model | Paper | Features | Maintainer | +|-------|-------|----------|------------| | [DenseNet 169](https://github.com/IntelAI/models/tree/master/benchmarks/image_recognition/tensorflow/densenet169) | [Densely Connected Convolutional Networks](https://arxiv.org/pdf/1608.06993) | • FP32 Inference | [Intel](https://github.com/IntelAI) | | [Inception V3](https://github.com/IntelAI/models/tree/master/benchmarks/image_recognition/tensorflow/inceptionv3) | [Rethinking the Inception Architecture
    for Computer Vision](https://arxiv.org/pdf/1512.00567.pdf) | • Int8 Inference
    • FP32 Inference | [Intel](https://github.com/IntelAI) | | [Inception V4](https://github.com/IntelAI/models/tree/master/benchmarks/image_recognition/tensorflow/inceptionv4) | [Inception-v4, Inception-ResNet and the Impact
    of Residual Connections on Learning](https://arxiv.org/pdf/1602.07261) | • Int8 Inference
    • FP32 Inference | [Intel](https://github.com/IntelAI) | @@ -21,12 +20,13 @@ This repository provides a curated list of the GitHub repositories with machine | [ResNet 50](https://github.com/IntelAI/models/tree/master/benchmarks/image_recognition/tensorflow/resnet50) | [Deep Residual Learning for Image Recognition](https://arxiv.org/pdf/1512.03385) | • Int8 Inference
    • FP32 Inference | [Intel](https://github.com/IntelAI) | | [ResNet 50v1.5](https://github.com/IntelAI/models/tree/master/benchmarks/image_recognition/tensorflow/resnet50v1_5) | [Deep Residual Learning for Image Recognition](https://arxiv.org/pdf/1512.03385) | • Int8 Inference
    • FP32 Inference
    • FP32 Training | [Intel](https://github.com/IntelAI) | -#### Segmentation -| Model | Reference (Paper) |       Features       | Maintainer | -|-------|-------------------|----------|------------| +### Segmentation + +| Model | Paper | Features | Maintainer | +|-------|-------|----------|------------| | [Mask R-CNN](https://github.com/NVIDIA/DeepLearningExamples/tree/master/TensorFlow2/Segmentation/MaskRCNN) | [Mask R-CNN](https://arxiv.org/abs/1703.06870) | • Automatic Mixed Precision
    • Multi-GPU training support with Horovod
    • TensorRT | [NVIDIA](https://github.com/NVIDIA) | | [U-Net Medical Image Segmentation](https://github.com/NVIDIA/DeepLearningExamples/tree/master/TensorFlow2/Segmentation/UNet_Medical) | [U-Net: Convolutional Networks for Biomedical Image Segmentation](https://arxiv.org/abs/1505.04597) | • Automatic Mixed Precision
    • Multi-GPU training support with Horovod
    • TensorRT | [NVIDIA](https://github.com/NVIDIA) | ## Contributions -If you want to contribute, please review the [contribution guidelines](../../../wiki/How-to-contribute). +If you want to contribute, please review the [contribution guidelines](https://github.com/tensorflow/models/wiki/How-to-contribute). -- GitLab From 4b0cec67221923d05d854631a221bd3dc4606664 Mon Sep 17 00:00:00 2001 From: Hongkun Yu Date: Sun, 21 Jun 2020 14:04:22 -0700 Subject: [PATCH 40/79] Internal change PiperOrigin-RevId: 317560325 --- official/modeling/tf_utils.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/official/modeling/tf_utils.py b/official/modeling/tf_utils.py index 34f8f66e7..279208239 100644 --- a/official/modeling/tf_utils.py +++ b/official/modeling/tf_utils.py @@ -173,3 +173,18 @@ def assert_rank(tensor, expected_rank, name=None): "For the tensor `%s`, the actual tensor rank `%d` (shape = %s) is not " "equal to the expected tensor rank `%s`" % (name, actual_rank, str(tensor.shape), str(expected_rank))) + + +def safe_mean(losses): + """Computes a safe mean of the losses. + + Args: + losses: `Tensor` whose elements contain individual loss measurements. + + Returns: + A scalar representing the mean of `losses`. If `num_present` is zero, + then zero is returned. + """ + total = tf.reduce_sum(losses) + num_elements = tf.cast(tf.size(losses), dtype=losses.dtype) + return tf.math.divide_no_nan(total, num_elements) -- GitLab From 8aa44501abd01cc95bcddd27fd686aa52ce138c2 Mon Sep 17 00:00:00 2001 From: Hongkun Yu Date: Sun, 21 Jun 2020 23:04:57 -0700 Subject: [PATCH 41/79] Internal change PiperOrigin-RevId: 317596394 --- official/nlp/modeling/layers/README.md | 8 +- official/nlp/modeling/layers/__init__.py | 2 +- official/nlp/modeling/layers/transformer.py | 108 +++++++++++++----- .../nlp/modeling/layers/transformer_test.py | 6 +- official/nlp/nhnet/decoder.py | 8 +- 5 files changed, 95 insertions(+), 37 deletions(-) diff --git a/official/nlp/modeling/layers/README.md b/official/nlp/modeling/layers/README.md index e78818732..42f299a3f 100644 --- a/official/nlp/modeling/layers/README.md +++ b/official/nlp/modeling/layers/README.md @@ -28,6 +28,10 @@ assemble new layers, networks, or models. described in ["Attention Is All You Need"](https://arxiv.org/abs/1706.03762). +* [TransformerDecoderLayer](transformer.py) TransformerDecoderLayer is made up + of self multi-head attention, cross multi-head attention and + feedforward network. + * [ReZeroTransformer](rezero_transformer.py) implements Transformer with ReZero described in ["ReZero is All You Need: Fast Convergence at Large Depth"](https://arxiv.org/abs/2003.04887). @@ -49,8 +53,8 @@ assemble new layers, networks, or models. should be masked), the output will have masked positions set to approximately zero. -* [`MaskedLM`](masked_lm.py) implements a masked language model. It assumes the - embedding table variable is passed to it. +* [`MaskedLM`](masked_lm.py) implements a masked language model. It assumes + the embedding table variable is passed to it. * [ClassificationHead](cls_head.py) A pooling head over a sequence of embeddings, commonly used by classification tasks. 
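The `TransformerDecoderLayer` entry added to the catalog above pairs with the constructor change made to `transformer.py` later in this patch: `hidden_size`, `hidden_dropout_prob`, `attention_probs_dropout_prob`, and `initializer_range` are replaced by `dropout_rate`, `attention_dropout_rate`, and the standard Keras initializer/regularizer/constraint arguments, and the hidden size is now inferred from the target tensor's last dimension in `build()`. A minimal construction sketch, with illustrative values that mirror the updated unit test and the `nhnet` decoder call site further down in the patch:

```
import tensorflow as tf

from official.nlp.modeling.layers import transformer

# Old-style construction (removed by this patch):
#   transformer.TransformerDecoderLayer(
#       hidden_size=16, num_attention_heads=2, intermediate_size=32,
#       intermediate_activation="relu", hidden_dropout_prob=0.1,
#       attention_probs_dropout_prob=0.1, initializer_range=0.1)

# New-style construction: no width argument; the hidden size is read from the
# last dimension of the target tensor the first time the layer is built.
decoder_block = transformer.TransformerDecoderLayer(
    num_attention_heads=2,
    intermediate_size=32,
    intermediate_activation="relu",
    dropout_rate=0.1,
    attention_dropout_rate=0.1,
    kernel_initializer=tf.keras.initializers.TruncatedNormal(stddev=0.02))
```

Because the width comes from the inputs, callers such as the NHNet decoder no longer thread `hidden_size` through their own configuration; they only pass an initializer built from `initializer_range`, as the `decoder.py` hunk below shows.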
diff --git a/official/nlp/modeling/layers/__init__.py b/official/nlp/modeling/layers/__init__.py index 08b6d596a..2cd8e7b9e 100644 --- a/official/nlp/modeling/layers/__init__.py +++ b/official/nlp/modeling/layers/__init__.py @@ -26,5 +26,5 @@ from official.nlp.modeling.layers.position_embedding import PositionEmbedding from official.nlp.modeling.layers.rezero_transformer import ReZeroTransformer from official.nlp.modeling.layers.self_attention_mask import SelfAttentionMask from official.nlp.modeling.layers.talking_heads_attention import TalkingHeadsAttention -from official.nlp.modeling.layers.transformer import Transformer +from official.nlp.modeling.layers.transformer import * from official.nlp.modeling.layers.transformer_scaffold import TransformerScaffold diff --git a/official/nlp/modeling/layers/transformer.py b/official/nlp/modeling/layers/transformer.py index 104ebaef0..92f509cf2 100644 --- a/official/nlp/modeling/layers/transformer.py +++ b/official/nlp/modeling/layers/transformer.py @@ -79,6 +79,7 @@ class Transformer(tf.keras.layers.Layer): self._bias_initializer = tf.keras.initializers.get(bias_initializer) self._kernel_regularizer = tf.keras.regularizers.get(kernel_regularizer) self._bias_regularizer = tf.keras.regularizers.get(bias_regularizer) + self._activity_regularizer = tf.keras.regularizers.get(activity_regularizer) self._kernel_constraint = tf.keras.constraints.get(kernel_constraint) self._bias_constraint = tf.keras.constraints.get(bias_constraint) @@ -247,57 +248,96 @@ class TransformerDecoderLayer(tf.keras.layers.Layer): (1) a multi-head self-attention mechanism. (2) a encoder-decoder attention. (3) a positionwise fully connected feed-forward network. + + Arguments: + num_attention_heads: Number of attention heads. + intermediate_size: Size of the intermediate layer. + intermediate_activation: Activation for the intermediate layer. + dropout_rate: Dropout probability for the post-attention and output dropout. + attention_dropout_rate: Dropout probability for within the attention layer. + multi_channel_cross_attention: Whether to use `MultiChannelAttention` for + cross-attention between target sequences and source sequences. + kernel_initializer: Initializer for dense layer kernels. + bias_initializer: Initializer for dense layer biases. + kernel_regularizer: Regularizer for dense layer kernels. + bias_regularizer: Regularizer for dense layer biases. + activity_regularizer: Regularizer for dense layer activity. + kernel_constraint: Constraint for dense layer kernels. + bias_constraint: Constraint for dense layer kernels. 
""" def __init__(self, - hidden_size=768, - num_attention_heads=12, - intermediate_size=3072, - intermediate_activation="relu", - hidden_dropout_prob=0.0, - attention_probs_dropout_prob=0.0, - initializer_range=0.02, + num_attention_heads, + intermediate_size, + intermediate_activation, + dropout_rate=0.0, + attention_dropout_rate=0.0, multi_channel_cross_attention=False, + kernel_initializer="glorot_uniform", + bias_initializer="zeros", + kernel_regularizer=None, + bias_regularizer=None, + activity_regularizer=None, + kernel_constraint=None, + bias_constraint=None, **kwargs): super(TransformerDecoderLayer, self).__init__(**kwargs) - self.hidden_size = hidden_size self.num_attention_heads = num_attention_heads self.intermediate_size = intermediate_size self.intermediate_activation = tf.keras.activations.get( intermediate_activation) - self.hidden_dropout_prob = hidden_dropout_prob - self.attention_probs_dropout_prob = attention_probs_dropout_prob + self.dropout_rate = dropout_rate + self.attention_dropout_rate = attention_dropout_rate self.multi_channel_cross_attention = multi_channel_cross_attention - self._kernel_initializer = tf.keras.initializers.TruncatedNormal( - stddev=initializer_range) - self._bias_initializer = tf.keras.initializers.get("zeros") + self._kernel_initializer = tf.keras.initializers.get(kernel_initializer) + self._bias_initializer = tf.keras.initializers.get(bias_initializer) + self._kernel_regularizer = tf.keras.regularizers.get(kernel_regularizer) + self._bias_regularizer = tf.keras.regularizers.get(bias_regularizer) + self._activity_regularizer = tf.keras.regularizers.get(activity_regularizer) + self._kernel_constraint = tf.keras.constraints.get(kernel_constraint) + self._bias_constraint = tf.keras.constraints.get(bias_constraint) if self.multi_channel_cross_attention: self._cross_attention_cls = multi_channel_attention.MultiChannelAttention else: self._cross_attention_cls = attention.MultiHeadAttention - if self.hidden_size % self.num_attention_heads != 0: + def build(self, input_shape): + target_tensor_shape = tf.TensorShape(input_shape[0]) + if len(target_tensor_shape) != 3: + raise ValueError("TransformerLayer expects a three-dimensional input of " + "shape [batch, sequence, width].") + hidden_size = target_tensor_shape[2] + if hidden_size % self.num_attention_heads != 0: raise ValueError( "The hidden size (%d) is not a multiple of the number of attention " - "heads (%d)" % (self.hidden_size, self.num_attention_heads)) - self.attention_head_size = int(self.hidden_size / self.num_attention_heads) - - def build(self, input_shape): + "heads (%d)" % (hidden_size, self.num_attention_heads)) + self.attention_head_size = int(hidden_size / self.num_attention_heads) # Self attention. 
self.self_attention = attention.CachedAttention( num_heads=self.num_attention_heads, key_size=self.attention_head_size, - dropout=self.attention_probs_dropout_prob, + dropout=self.attention_dropout_rate, kernel_initializer=self._kernel_initializer, + bias_initializer=self._bias_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer, + activity_regularizer=self._activity_regularizer, + kernel_constraint=self._kernel_constraint, + bias_constraint=self._bias_constraint, name="self_attention") self.self_attention_output_dense = dense_einsum.DenseEinsum( - output_shape=self.hidden_size, + output_shape=hidden_size, num_summed_dimensions=2, kernel_initializer=self._kernel_initializer, bias_initializer=self._bias_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer, + activity_regularizer=self._activity_regularizer, + kernel_constraint=self._kernel_constraint, + bias_constraint=self._bias_constraint, name="self_attention_output") self.self_attention_dropout = tf.keras.layers.Dropout( - rate=self.hidden_dropout_prob) + rate=self.dropout_rate) self.self_attention_layer_norm = ( tf.keras.layers.LayerNormalization( name="self_attention_layer_norm", axis=-1, epsilon=1e-12)) @@ -305,13 +345,19 @@ class TransformerDecoderLayer(tf.keras.layers.Layer): self.encdec_attention = self._cross_attention_cls( num_heads=self.num_attention_heads, key_size=self.attention_head_size, - dropout=self.attention_probs_dropout_prob, - output_shape=self.hidden_size, + dropout=self.attention_dropout_rate, + output_shape=hidden_size, kernel_initializer=self._kernel_initializer, + bias_initializer=self._bias_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer, + activity_regularizer=self._activity_regularizer, + kernel_constraint=self._kernel_constraint, + bias_constraint=self._bias_constraint, name="attention/encdec") self.encdec_attention_dropout = tf.keras.layers.Dropout( - rate=self.hidden_dropout_prob) + rate=self.dropout_rate) self.encdec_attention_layer_norm = ( tf.keras.layers.LayerNormalization( name="attention/encdec_output_layer_norm", axis=-1, epsilon=1e-12)) @@ -322,15 +368,25 @@ class TransformerDecoderLayer(tf.keras.layers.Layer): activation=None, kernel_initializer=self._kernel_initializer, bias_initializer=self._bias_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer, + activity_regularizer=self._activity_regularizer, + kernel_constraint=self._kernel_constraint, + bias_constraint=self._bias_constraint, name="intermediate") self.intermediate_activation_layer = tf.keras.layers.Activation( self.intermediate_activation) self.output_dense = dense_einsum.DenseEinsum( - output_shape=self.hidden_size, + output_shape=hidden_size, kernel_initializer=self._kernel_initializer, bias_initializer=self._bias_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer, + activity_regularizer=self._activity_regularizer, + kernel_constraint=self._kernel_constraint, + bias_constraint=self._bias_constraint, name="output") - self.output_dropout = tf.keras.layers.Dropout(rate=self.hidden_dropout_prob) + self.output_dropout = tf.keras.layers.Dropout(rate=self.dropout_rate) self.output_layer_norm = tf.keras.layers.LayerNormalization( name="output_layer_norm", axis=-1, epsilon=1e-12) super(TransformerDecoderLayer, self).build(input_shape) diff --git a/official/nlp/modeling/layers/transformer_test.py 
b/official/nlp/modeling/layers/transformer_test.py index 1b494ac87..841feb994 100644 --- a/official/nlp/modeling/layers/transformer_test.py +++ b/official/nlp/modeling/layers/transformer_test.py @@ -233,13 +233,11 @@ class TransformerDecoderLayerTest(keras_parameterized.TestCase): num_attention_heads = 2 hidden_size = 16 decoder_block = transformer.TransformerDecoderLayer( - hidden_size=hidden_size, num_attention_heads=num_attention_heads, intermediate_size=32, intermediate_activation='relu', - hidden_dropout_prob=0.1, - attention_probs_dropout_prob=0.1, - initializer_range=0.1) + dropout_rate=0.1, + attention_dropout_rate=0.1) # Forward path. dummy_tensor = tf.zeros([2, 4, 16], dtype=tf.float32) dummy_mask = tf.zeros([2, 4, 4], dtype=tf.float32) diff --git a/official/nlp/nhnet/decoder.py b/official/nlp/nhnet/decoder.py index c9f676dba..b38fa2a6b 100644 --- a/official/nlp/nhnet/decoder.py +++ b/official/nlp/nhnet/decoder.py @@ -60,13 +60,13 @@ class TransformerDecoder(tf.keras.layers.Layer): for i in range(self.num_hidden_layers): self.layers.append( transformer.TransformerDecoderLayer( - hidden_size=self.hidden_size, num_attention_heads=self.num_attention_heads, intermediate_size=self.intermediate_size, intermediate_activation=self.intermediate_activation, - hidden_dropout_prob=self.hidden_dropout_prob, - attention_probs_dropout_prob=self.attention_probs_dropout_prob, - initializer_range=self.initializer_range, + dropout_rate=self.hidden_dropout_prob, + attention_dropout_rate=self.attention_probs_dropout_prob, + kernel_initializer=tf.keras.initializers.TruncatedNormal( + stddev=self.initializer_range), multi_channel_cross_attention=self.multi_channel_cross_attention, name=("layer_%d" % i))) super(TransformerDecoder, self).build(unused_input_shapes) -- GitLab From 1357ce1904eb522f7fbfa46f7edcf091162287b7 Mon Sep 17 00:00:00 2001 From: Jeremiah Harmsen Date: Mon, 22 Jun 2020 05:56:41 -0700 Subject: [PATCH 42/79] Internal change PiperOrigin-RevId: 317638173 --- official/nlp/bert/bert_models.py | 28 ++++++++++++++++------------ 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/official/nlp/bert/bert_models.py b/official/nlp/bert/bert_models.py index e26c2a0ca..9d16150d0 100644 --- a/official/nlp/bert/bert_models.py +++ b/official/nlp/bert/bert_models.py @@ -25,7 +25,6 @@ import tensorflow_hub as hub from official.modeling import tf_utils from official.nlp.albert import configs as albert_configs from official.nlp.bert import configs -from official.nlp.modeling import losses from official.nlp.modeling import models from official.nlp.modeling import networks @@ -67,22 +66,27 @@ class BertPretrainLossAndMetricLayer(tf.keras.layers.Layer): next_sentence_loss, name='next_sentence_loss', aggregation='mean') def call(self, - lm_output, - sentence_output, + lm_output_logits, + sentence_output_logits, lm_label_ids, lm_label_weights, sentence_labels=None): """Implements call() for the layer.""" lm_label_weights = tf.cast(lm_label_weights, tf.float32) - lm_output = tf.cast(lm_output, tf.float32) + lm_output_logits = tf.cast(lm_output_logits, tf.float32) - mask_label_loss = losses.weighted_sparse_categorical_crossentropy_loss( - labels=lm_label_ids, predictions=lm_output, weights=lm_label_weights) + lm_prediction_losses = tf.keras.losses.sparse_categorical_crossentropy( + lm_label_ids, lm_output_logits, from_logits=True) + lm_numerator_loss = tf.reduce_sum(lm_prediction_losses * lm_label_weights) + lm_denominator_loss = tf.reduce_sum(lm_label_weights) + mask_label_loss = 
tf.math.divide_no_nan(lm_numerator_loss, + lm_denominator_loss) if sentence_labels is not None: - sentence_output = tf.cast(sentence_output, tf.float32) - sentence_loss = losses.weighted_sparse_categorical_crossentropy_loss( - labels=sentence_labels, predictions=sentence_output) + sentence_output_logits = tf.cast(sentence_output_logits, tf.float32) + sentence_loss = tf.keras.losses.sparse_categorical_crossentropy( + sentence_labels, sentence_output_logits, from_logits=True) + sentence_loss = tf.reduce_mean(sentence_loss) loss = mask_label_loss + sentence_loss else: sentence_loss = None @@ -92,8 +96,8 @@ class BertPretrainLossAndMetricLayer(tf.keras.layers.Layer): # TODO(hongkuny): Avoids the hack and switches add_loss. final_loss = tf.fill(batch_shape, loss) - self._add_metrics(lm_output, lm_label_ids, lm_label_weights, - mask_label_loss, sentence_output, sentence_labels, + self._add_metrics(lm_output_logits, lm_label_ids, lm_label_weights, + mask_label_loss, sentence_output_logits, sentence_labels, sentence_loss) return final_loss @@ -228,7 +232,7 @@ def pretrain_model(bert_config, activation=tf_utils.get_activation(bert_config.hidden_act), num_token_predictions=max_predictions_per_seq, initializer=initializer, - output='predictions') + output='logits') outputs = pretrainer_model( [input_word_ids, input_mask, input_type_ids, masked_lm_positions]) -- GitLab From 28a18f7febaf7768af8fdd64760f2269cbf9b8b5 Mon Sep 17 00:00:00 2001 From: freezestudio Date: Mon, 22 Jun 2020 22:26:44 +0800 Subject: [PATCH 43/79] Update maskrcnn_parser.py Fix a small spelling error --- official/vision/detection/dataloader/maskrcnn_parser.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/official/vision/detection/dataloader/maskrcnn_parser.py b/official/vision/detection/dataloader/maskrcnn_parser.py index 933e1b75c..35db6f147 100644 --- a/official/vision/detection/dataloader/maskrcnn_parser.py +++ b/official/vision/detection/dataloader/maskrcnn_parser.py @@ -185,12 +185,12 @@ class Parser(object): is_crowds = data['groundtruth_is_crowd'] # Skips annotations with `is_crowd` = True. 
if self._skip_crowd_during_training and self._is_training: - num_groundtrtuhs = tf.shape(classes)[0] - with tf.control_dependencies([num_groundtrtuhs, is_crowds]): + num_groundtruths = tf.shape(classes)[0] + with tf.control_dependencies([num_groundtruths, is_crowds]): indices = tf.cond( tf.greater(tf.size(is_crowds), 0), lambda: tf.where(tf.logical_not(is_crowds))[:, 0], - lambda: tf.cast(tf.range(num_groundtrtuhs), tf.int64)) + lambda: tf.cast(tf.range(num_groundtruths), tf.int64)) classes = tf.gather(classes, indices) boxes = tf.gather(boxes, indices) if self._include_mask: -- GitLab From d4f5c193d638df1e6547a343f8747a32c5d157b5 Mon Sep 17 00:00:00 2001 From: Chen Chen Date: Mon, 22 Jun 2020 11:05:32 -0700 Subject: [PATCH 44/79] Internal change PiperOrigin-RevId: 317691679 --- official/nlp/configs/bert.py | 19 +++ official/nlp/data/tagging_data_loader.py | 64 ++++++++ .../modeling/models/bert_token_classifier.py | 5 + official/nlp/tasks/question_answering.py | 15 +- official/nlp/tasks/sentence_prediction.py | 14 +- official/nlp/tasks/tagging.py | 147 ++++++++++++++++++ official/nlp/tasks/tagging_test.py | 125 +++++++++++++++ official/nlp/tasks/utils.py | 34 ++++ 8 files changed, 398 insertions(+), 25 deletions(-) create mode 100644 official/nlp/data/tagging_data_loader.py create mode 100644 official/nlp/tasks/tagging.py create mode 100644 official/nlp/tasks/tagging_test.py create mode 100644 official/nlp/tasks/utils.py diff --git a/official/nlp/configs/bert.py b/official/nlp/configs/bert.py index 0f6a74e31..48b83107f 100644 --- a/official/nlp/configs/bert.py +++ b/official/nlp/configs/bert.py @@ -130,3 +130,22 @@ class QADevDataConfig(cfg.DataConfig): is_training: bool = False seq_length: int = 384 drop_remainder: bool = False + + +@dataclasses.dataclass +class TaggingDataConfig(cfg.DataConfig): + """Data config for tagging (tasks/tagging).""" + input_path: str = "" + global_batch_size: int = 48 + is_training: bool = True + seq_length: int = 384 + + +@dataclasses.dataclass +class TaggingDevDataConfig(cfg.DataConfig): + """Dev Data config for tagging (tasks/tagging).""" + input_path: str = "" + global_batch_size: int = 48 + is_training: bool = False + seq_length: int = 384 + drop_remainder: bool = False diff --git a/official/nlp/data/tagging_data_loader.py b/official/nlp/data/tagging_data_loader.py new file mode 100644 index 000000000..127a5e004 --- /dev/null +++ b/official/nlp/data/tagging_data_loader.py @@ -0,0 +1,64 @@ +# Lint as: python3 +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Loads dataset for the tagging (e.g., NER/POS) task.""" +from typing import Mapping, Optional +import tensorflow as tf + +from official.core import input_reader + + +class TaggingDataLoader: + """A class to load dataset for tagging (e.g., NER and POS) task.""" + + def __init__(self, params): + self._params = params + self._seq_length = params.seq_length + + def _decode(self, record: tf.Tensor): + """Decodes a serialized tf.Example.""" + name_to_features = { + 'input_ids': tf.io.FixedLenFeature([self._seq_length], tf.int64), + 'input_mask': tf.io.FixedLenFeature([self._seq_length], tf.int64), + 'segment_ids': tf.io.FixedLenFeature([self._seq_length], tf.int64), + 'label_ids': tf.io.FixedLenFeature([self._seq_length], tf.int64), + } + example = tf.io.parse_single_example(record, name_to_features) + + # tf.Example only supports tf.int64, but the TPU only supports tf.int32. + # So cast all int64 to int32. + for name in example: + t = example[name] + if t.dtype == tf.int64: + t = tf.cast(t, tf.int32) + example[name] = t + + return example + + def _parse(self, record: Mapping[str, tf.Tensor]): + """Parses raw tensors into a dict of tensors to be consumed by the model.""" + x = { + 'input_word_ids': record['input_ids'], + 'input_mask': record['input_mask'], + 'input_type_ids': record['segment_ids'] + } + y = record['label_ids'] + return (x, y) + + def load(self, input_context: Optional[tf.distribute.InputContext] = None): + """Returns a tf.dataset.Dataset.""" + reader = input_reader.InputReader( + params=self._params, decoder_fn=self._decode, parser_fn=self._parse) + return reader.read(input_context) diff --git a/official/nlp/modeling/models/bert_token_classifier.py b/official/nlp/modeling/models/bert_token_classifier.py index 8054cb1ff..4967d7177 100644 --- a/official/nlp/modeling/models/bert_token_classifier.py +++ b/official/nlp/modeling/models/bert_token_classifier.py @@ -55,6 +55,7 @@ class BertTokenClassifier(tf.keras.Model): dropout_rate=0.1, **kwargs): self._self_setattr_tracking = False + self._network = network self._config = { 'network': network, 'num_classes': num_classes, @@ -84,6 +85,10 @@ class BertTokenClassifier(tf.keras.Model): super(BertTokenClassifier, self).__init__( inputs=inputs, outputs=predictions, **kwargs) + @property + def checkpoint_items(self): + return dict(encoder=self._network) + def get_config(self): return self._config diff --git a/official/nlp/tasks/question_answering.py b/official/nlp/tasks/question_answering.py index c6bc58d96..7b3cb7f4e 100644 --- a/official/nlp/tasks/question_answering.py +++ b/official/nlp/tasks/question_answering.py @@ -24,6 +24,7 @@ from official.modeling.hyperparams import config_definitions as cfg from official.nlp.bert import input_pipeline from official.nlp.configs import encoders from official.nlp.modeling import models +from official.nlp.tasks import utils @dataclasses.dataclass @@ -57,19 +58,7 @@ class QuestionAnsweringTask(base_task.Task): def build_model(self): if self._hub_module: - # TODO(lehou): maybe add the hub_module building logic to a util function. 
- input_word_ids = tf.keras.layers.Input( - shape=(None,), dtype=tf.int32, name='input_word_ids') - input_mask = tf.keras.layers.Input( - shape=(None,), dtype=tf.int32, name='input_mask') - input_type_ids = tf.keras.layers.Input( - shape=(None,), dtype=tf.int32, name='input_type_ids') - bert_model = hub.KerasLayer(self._hub_module, trainable=True) - pooled_output, sequence_output = bert_model( - [input_word_ids, input_mask, input_type_ids]) - encoder_network = tf.keras.Model( - inputs=[input_word_ids, input_mask, input_type_ids], - outputs=[sequence_output, pooled_output]) + encoder_network = utils.get_encoder_from_hub(self._hub_module) else: encoder_network = encoders.instantiate_encoder_from_cfg( self.task_config.network) diff --git a/official/nlp/tasks/sentence_prediction.py b/official/nlp/tasks/sentence_prediction.py index 4c5b57b99..b2eb0bf47 100644 --- a/official/nlp/tasks/sentence_prediction.py +++ b/official/nlp/tasks/sentence_prediction.py @@ -27,6 +27,7 @@ from official.modeling.hyperparams import config_definitions as cfg from official.nlp.configs import bert from official.nlp.data import sentence_prediction_dataloader from official.nlp.modeling import losses as loss_lib +from official.nlp.tasks import utils @dataclasses.dataclass @@ -67,18 +68,7 @@ class SentencePredictionTask(base_task.Task): def build_model(self): if self._hub_module: - input_word_ids = tf.keras.layers.Input( - shape=(None,), dtype=tf.int32, name='input_word_ids') - input_mask = tf.keras.layers.Input( - shape=(None,), dtype=tf.int32, name='input_mask') - input_type_ids = tf.keras.layers.Input( - shape=(None,), dtype=tf.int32, name='input_type_ids') - bert_model = hub.KerasLayer(self._hub_module, trainable=True) - pooled_output, sequence_output = bert_model( - [input_word_ids, input_mask, input_type_ids]) - encoder_from_hub = tf.keras.Model( - inputs=[input_word_ids, input_mask, input_type_ids], - outputs=[sequence_output, pooled_output]) + encoder_from_hub = utils.get_encoder_from_hub(self._hub_module) return bert.instantiate_bertpretrainer_from_cfg( self.task_config.network, encoder_network=encoder_from_hub) else: diff --git a/official/nlp/tasks/tagging.py b/official/nlp/tasks/tagging.py new file mode 100644 index 000000000..a1f20b136 --- /dev/null +++ b/official/nlp/tasks/tagging.py @@ -0,0 +1,147 @@ +# Lint as: python3 +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Tagging (e.g., NER/POS) task.""" +import logging +import dataclasses +import tensorflow as tf +import tensorflow_hub as hub + +from official.core import base_task +from official.modeling.hyperparams import config_definitions as cfg +from official.nlp.configs import encoders +from official.nlp.data import tagging_data_loader +from official.nlp.modeling import models +from official.nlp.tasks import utils + + +@dataclasses.dataclass +class TaggingConfig(cfg.TaskConfig): + """The model config.""" + # At most one of `init_checkpoint` and `hub_module_url` can be specified. + init_checkpoint: str = '' + hub_module_url: str = '' + network: encoders.TransformerEncoderConfig = ( + encoders.TransformerEncoderConfig()) + num_classes: int = 0 + # The ignored label id will not contribute to loss. + # A word may be tokenized into multiple word_pieces tokens, and we usually + # assign the real label id for the first token of the word, and + # `ignore_label_id` for the remaining tokens. + ignore_label_id: int = 0 + train_data: cfg.DataConfig = cfg.DataConfig() + validation_data: cfg.DataConfig = cfg.DataConfig() + + +@base_task.register_task_cls(TaggingConfig) +class TaggingTask(base_task.Task): + """Task object for tagging (e.g., NER or POS).""" + + def __init__(self, params=cfg.TaskConfig): + super(TaggingTask, self).__init__(params) + if params.hub_module_url and params.init_checkpoint: + raise ValueError('At most one of `hub_module_url` and ' + '`init_checkpoint` can be specified.') + if params.num_classes == 0: + raise ValueError('TaggingConfig.num_classes cannot be 0.') + + if params.hub_module_url: + self._hub_module = hub.load(params.hub_module_url) + else: + self._hub_module = None + + def build_model(self): + if self._hub_module: + encoder_network = utils.get_encoder_from_hub(self._hub_module) + else: + encoder_network = encoders.instantiate_encoder_from_cfg( + self.task_config.network) + + return models.BertTokenClassifier( + network=encoder_network, + num_classes=self.task_config.num_classes, + initializer=tf.keras.initializers.TruncatedNormal( + stddev=self.task_config.network.initializer_range), + dropout_rate=self.task_config.network.dropout_rate, + output='logits') + + def build_losses(self, labels, model_outputs, aux_losses=None) -> tf.Tensor: + model_outputs = tf.cast(model_outputs, tf.float32) + loss = tf.keras.losses.sparse_categorical_crossentropy( + labels, model_outputs, from_logits=True) + # `ignore_label_id` will not contribute to loss. 
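+    # For example, with ignore_label_id = 0 and labels [[3, 7, 0, 0]] (the
+    # zeros marking continuation word pieces), the weights below are
+    # [[1., 1., 0., 0.]]: only the two real labels enter the weighted sum and
+    # the weight total, and divide_no_nan returns 0 if every position is
+    # ignored.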
+ label_weights = tf.cast( + tf.not_equal(labels, self.task_config.ignore_label_id), + dtype=tf.float32) + numerator_loss = tf.reduce_sum(loss * label_weights) + denominator_loss = tf.reduce_sum(label_weights) + loss = tf.math.divide_no_nan(numerator_loss, denominator_loss) + return loss + + def build_inputs(self, params, input_context=None): + """Returns tf.data.Dataset for sentence_prediction task.""" + if params.input_path == 'dummy': + + def dummy_data(_): + dummy_ids = tf.zeros((1, params.seq_length), dtype=tf.int32) + x = dict( + input_word_ids=dummy_ids, + input_mask=dummy_ids, + input_type_ids=dummy_ids) + y = tf.ones((1, params.seq_length), dtype=tf.int32) + return (x, y) + + dataset = tf.data.Dataset.range(1) + dataset = dataset.repeat() + dataset = dataset.map( + dummy_data, num_parallel_calls=tf.data.experimental.AUTOTUNE) + return dataset + + dataset = tagging_data_loader.TaggingDataLoader(params).load(input_context) + return dataset + + def build_metrics(self, training=None): + del training + # TODO(chendouble): evaluate using seqeval's f1/precision/recall. + return [tf.keras.metrics.SparseCategoricalAccuracy(name='accuracy')] + + def process_metrics(self, metrics, labels, model_outputs): + # `ignore_label_id` will not contribute to metrics. + sample_weight = tf.cast( + tf.not_equal(labels, self.task_config.ignore_label_id), + dtype=tf.float32) + for metric in metrics: + metric.update_state(labels, model_outputs, sample_weight) + + def process_compiled_metrics(self, compiled_metrics, labels, model_outputs): + # `ignore_label_id` will not contribute to metrics. + sample_weight = tf.cast( + tf.not_equal(labels, self.task_config.ignore_label_id), + dtype=tf.float32) + compiled_metrics.update_state(labels, model_outputs, sample_weight) + + def initialize(self, model): + """Load a pretrained checkpoint (if exists) and then train from iter 0.""" + ckpt_dir_or_file = self.task_config.init_checkpoint + if tf.io.gfile.isdir(ckpt_dir_or_file): + ckpt_dir_or_file = tf.train.latest_checkpoint(ckpt_dir_or_file) + if not ckpt_dir_or_file: + return + + ckpt = tf.train.Checkpoint(**model.checkpoint_items) + status = ckpt.restore(ckpt_dir_or_file) + status.expect_partial().assert_existing_objects_matched() + logging.info('finished loading pretrained checkpoint from %s', + ckpt_dir_or_file) diff --git a/official/nlp/tasks/tagging_test.py b/official/nlp/tasks/tagging_test.py new file mode 100644 index 000000000..6707a50a8 --- /dev/null +++ b/official/nlp/tasks/tagging_test.py @@ -0,0 +1,125 @@ +# Lint as: python3 +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Tests for official.nlp.tasks.tagging.""" +import functools +import os +import tensorflow as tf + +from official.nlp.bert import configs +from official.nlp.bert import export_tfhub +from official.nlp.configs import bert +from official.nlp.configs import encoders +from official.nlp.tasks import tagging + + +class TaggingTest(tf.test.TestCase): + + def setUp(self): + super(TaggingTest, self).setUp() + self._encoder_config = encoders.TransformerEncoderConfig( + vocab_size=30522, num_layers=1) + self._train_data_config = bert.TaggingDataConfig( + input_path="dummy", seq_length=128, global_batch_size=1) + + def _run_task(self, config): + task = tagging.TaggingTask(config) + model = task.build_model() + metrics = task.build_metrics() + + strategy = tf.distribute.get_strategy() + dataset = strategy.experimental_distribute_datasets_from_function( + functools.partial(task.build_inputs, config.train_data)) + + iterator = iter(dataset) + optimizer = tf.keras.optimizers.SGD(lr=0.1) + task.train_step(next(iterator), model, optimizer, metrics=metrics) + task.validation_step(next(iterator), model, metrics=metrics) + + def test_task(self): + # Saves a checkpoint. + encoder = encoders.instantiate_encoder_from_cfg(self._encoder_config) + ckpt = tf.train.Checkpoint(encoder=encoder) + saved_path = ckpt.save(self.get_temp_dir()) + + config = tagging.TaggingConfig( + init_checkpoint=saved_path, + network=self._encoder_config, + train_data=self._train_data_config, + num_classes=3) + task = tagging.TaggingTask(config) + model = task.build_model() + metrics = task.build_metrics() + dataset = task.build_inputs(config.train_data) + + iterator = iter(dataset) + optimizer = tf.keras.optimizers.SGD(lr=0.1) + task.train_step(next(iterator), model, optimizer, metrics=metrics) + task.validation_step(next(iterator), model, metrics=metrics) + task.initialize(model) + + def test_task_with_fit(self): + config = tagging.TaggingConfig( + network=self._encoder_config, + train_data=self._train_data_config, + num_classes=3) + + task = tagging.TaggingTask(config) + model = task.build_model() + model = task.compile_model( + model, + optimizer=tf.keras.optimizers.SGD(lr=0.1), + train_step=task.train_step, + metrics=[tf.keras.metrics.SparseCategoricalAccuracy(name="accuracy")]) + dataset = task.build_inputs(config.train_data) + logs = model.fit(dataset, epochs=1, steps_per_epoch=2) + self.assertIn("loss", logs.history) + self.assertIn("accuracy", logs.history) + + def _export_bert_tfhub(self): + bert_config = configs.BertConfig( + vocab_size=30522, + hidden_size=16, + intermediate_size=32, + max_position_embeddings=128, + num_attention_heads=2, + num_hidden_layers=1) + _, encoder = export_tfhub.create_bert_model(bert_config) + model_checkpoint_dir = os.path.join(self.get_temp_dir(), "checkpoint") + checkpoint = tf.train.Checkpoint(model=encoder) + checkpoint.save(os.path.join(model_checkpoint_dir, "test")) + model_checkpoint_path = tf.train.latest_checkpoint(model_checkpoint_dir) + + vocab_file = os.path.join(self.get_temp_dir(), "uncased_vocab.txt") + with tf.io.gfile.GFile(vocab_file, "w") as f: + f.write("dummy content") + + hub_destination = os.path.join(self.get_temp_dir(), "hub") + export_tfhub.export_bert_tfhub(bert_config, model_checkpoint_path, + hub_destination, vocab_file) + return hub_destination + + def test_task_with_hub(self): + hub_module_url = self._export_bert_tfhub() + config = tagging.TaggingConfig( + hub_module_url=hub_module_url, 
+ network=self._encoder_config, + num_classes=4, + train_data=self._train_data_config) + self._run_task(config) + + +if __name__ == "__main__": + tf.test.main() diff --git a/official/nlp/tasks/utils.py b/official/nlp/tasks/utils.py new file mode 100644 index 000000000..467dafe31 --- /dev/null +++ b/official/nlp/tasks/utils.py @@ -0,0 +1,34 @@ +# Lint as: python3 +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Common utils for tasks.""" +import tensorflow as tf +import tensorflow_hub as hub + + +def get_encoder_from_hub(hub_module: str) -> tf.keras.Model: + """Gets an encoder from hub.""" + input_word_ids = tf.keras.layers.Input( + shape=(None,), dtype=tf.int32, name='input_word_ids') + input_mask = tf.keras.layers.Input( + shape=(None,), dtype=tf.int32, name='input_mask') + input_type_ids = tf.keras.layers.Input( + shape=(None,), dtype=tf.int32, name='input_type_ids') + hub_layer = hub.KerasLayer(hub_module, trainable=True) + pooled_output, sequence_output = hub_layer( + [input_word_ids, input_mask, input_type_ids]) + return tf.keras.Model( + inputs=[input_word_ids, input_mask, input_type_ids], + outputs=[sequence_output, pooled_output]) -- GitLab From f9ac96187031ea9c90829ee2b4723814c68d4d64 Mon Sep 17 00:00:00 2001 From: Hongkun Yu Date: Mon, 22 Jun 2020 17:54:42 -0700 Subject: [PATCH 45/79] Remove this r1 folder from the master branch in June, 2020. 
PiperOrigin-RevId: 317772122 --- official/r1/README.md | 23 - official/r1/__init__.py | 0 official/r1/boosted_trees/README.md | 117 --- official/r1/boosted_trees/__init__.py | 0 official/r1/boosted_trees/data_download.py | 97 -- official/r1/boosted_trees/train_higgs.py | 295 ------- official/r1/mnist/README.md | 91 -- official/r1/mnist/__init__.py | 0 official/r1/mnist/dataset.py | 117 --- official/r1/mnist/example3.png | Bin 368 -> 0 bytes official/r1/mnist/example5.png | Bin 367 -> 0 bytes official/r1/mnist/examples.npy | Bin 12624 -> 0 bytes official/r1/mnist/mnist.py | 247 ------ official/r1/mnist/mnist_eager.py | 214 ----- official/r1/mnist/mnist_test.py | 140 --- official/r1/mnist/mnist_tpu.py | 202 ----- official/r1/ncf/README.md | 7 - official/r1/ncf/ncf_estimator_main.py | 187 ---- official/r1/resnet/README.md | 156 ---- official/r1/resnet/__init__.py | 0 .../r1/resnet/cifar10_download_and_extract.py | 63 -- official/r1/resnet/cifar10_main.py | 297 ------- official/r1/resnet/cifar10_test.py | 183 ---- official/r1/resnet/estimator_benchmark.py | 500 ----------- official/r1/resnet/imagenet_main.py | 393 --------- official/r1/resnet/imagenet_preprocessing.py | 262 ------ official/r1/resnet/imagenet_test.py | 325 ------- official/r1/resnet/resnet_model.py | 548 ------------ official/r1/resnet/resnet_run_loop.py | 831 ------------------ official/r1/transformer/README.md | 380 -------- official/r1/transformer/__init__.py | 0 official/r1/transformer/attention_layer.py | 148 ---- official/r1/transformer/dataset.py | 284 ------ official/r1/transformer/embedding_layer.py | 108 --- official/r1/transformer/ffn_layer.py | 89 -- official/r1/transformer/schedule.py | 130 --- official/r1/transformer/schedule_test.py | 84 -- official/r1/transformer/transformer.py | 417 --------- official/r1/transformer/transformer_main.py | 710 --------------- official/r1/transformer/translate.py | 237 ----- official/r1/utils/__init__.py | 0 official/r1/utils/data/__init__.py | 0 official/r1/utils/data/file_io.py | 207 ----- official/r1/utils/data/file_io_test.py | 197 ----- official/r1/utils/export.py | 49 -- official/r1/utils/export_test.py | 63 -- official/r1/utils/logs/__init__.py | 0 official/r1/utils/logs/cloud_lib.py | 34 - official/r1/utils/logs/cloud_lib_test.py | 48 - official/r1/utils/logs/guidelines.md | 58 -- official/r1/utils/logs/hooks.py | 130 --- official/r1/utils/logs/hooks_helper.py | 173 ---- official/r1/utils/logs/hooks_test.py | 159 ---- official/r1/utils/logs/logger.py | 305 ------- official/r1/utils/logs/logger_test.py | 253 ------ official/r1/utils/logs/metric_hook.py | 97 -- official/r1/utils/logs/metric_hook_test.py | 217 ----- official/r1/utils/logs/mlperf_helper.py | 192 ---- official/r1/utils/logs/mock_lib.py | 36 - official/r1/utils/tpu.py | 116 --- official/r1/utils/tpu_test.py | 108 --- official/r1/wide_deep/README.md | 102 --- official/r1/wide_deep/__init__.py | 0 official/r1/wide_deep/census_dataset.py | 205 ----- official/r1/wide_deep/census_main.py | 115 --- official/r1/wide_deep/census_test.csv | 30 - official/r1/wide_deep/census_test.py | 163 ---- official/r1/wide_deep/movielens_dataset.py | 165 ---- official/r1/wide_deep/movielens_main.py | 114 --- official/r1/wide_deep/wide_deep_run_loop.py | 133 --- 70 files changed, 11351 deletions(-) delete mode 100644 official/r1/README.md delete mode 100644 official/r1/__init__.py delete mode 100644 official/r1/boosted_trees/README.md delete mode 100644 official/r1/boosted_trees/__init__.py delete mode 100644 
official/r1/boosted_trees/data_download.py delete mode 100644 official/r1/boosted_trees/train_higgs.py delete mode 100644 official/r1/mnist/README.md delete mode 100644 official/r1/mnist/__init__.py delete mode 100644 official/r1/mnist/dataset.py delete mode 100644 official/r1/mnist/example3.png delete mode 100644 official/r1/mnist/example5.png delete mode 100644 official/r1/mnist/examples.npy delete mode 100644 official/r1/mnist/mnist.py delete mode 100644 official/r1/mnist/mnist_eager.py delete mode 100644 official/r1/mnist/mnist_test.py delete mode 100644 official/r1/mnist/mnist_tpu.py delete mode 100644 official/r1/ncf/README.md delete mode 100644 official/r1/ncf/ncf_estimator_main.py delete mode 100644 official/r1/resnet/README.md delete mode 100644 official/r1/resnet/__init__.py delete mode 100644 official/r1/resnet/cifar10_download_and_extract.py delete mode 100644 official/r1/resnet/cifar10_main.py delete mode 100644 official/r1/resnet/cifar10_test.py delete mode 100644 official/r1/resnet/estimator_benchmark.py delete mode 100644 official/r1/resnet/imagenet_main.py delete mode 100644 official/r1/resnet/imagenet_preprocessing.py delete mode 100644 official/r1/resnet/imagenet_test.py delete mode 100644 official/r1/resnet/resnet_model.py delete mode 100644 official/r1/resnet/resnet_run_loop.py delete mode 100644 official/r1/transformer/README.md delete mode 100644 official/r1/transformer/__init__.py delete mode 100644 official/r1/transformer/attention_layer.py delete mode 100644 official/r1/transformer/dataset.py delete mode 100644 official/r1/transformer/embedding_layer.py delete mode 100644 official/r1/transformer/ffn_layer.py delete mode 100644 official/r1/transformer/schedule.py delete mode 100644 official/r1/transformer/schedule_test.py delete mode 100644 official/r1/transformer/transformer.py delete mode 100644 official/r1/transformer/transformer_main.py delete mode 100644 official/r1/transformer/translate.py delete mode 100644 official/r1/utils/__init__.py delete mode 100644 official/r1/utils/data/__init__.py delete mode 100644 official/r1/utils/data/file_io.py delete mode 100644 official/r1/utils/data/file_io_test.py delete mode 100644 official/r1/utils/export.py delete mode 100644 official/r1/utils/export_test.py delete mode 100644 official/r1/utils/logs/__init__.py delete mode 100644 official/r1/utils/logs/cloud_lib.py delete mode 100644 official/r1/utils/logs/cloud_lib_test.py delete mode 100644 official/r1/utils/logs/guidelines.md delete mode 100644 official/r1/utils/logs/hooks.py delete mode 100644 official/r1/utils/logs/hooks_helper.py delete mode 100644 official/r1/utils/logs/hooks_test.py delete mode 100644 official/r1/utils/logs/logger.py delete mode 100644 official/r1/utils/logs/logger_test.py delete mode 100644 official/r1/utils/logs/metric_hook.py delete mode 100644 official/r1/utils/logs/metric_hook_test.py delete mode 100644 official/r1/utils/logs/mlperf_helper.py delete mode 100644 official/r1/utils/logs/mock_lib.py delete mode 100644 official/r1/utils/tpu.py delete mode 100644 official/r1/utils/tpu_test.py delete mode 100644 official/r1/wide_deep/README.md delete mode 100644 official/r1/wide_deep/__init__.py delete mode 100644 official/r1/wide_deep/census_dataset.py delete mode 100644 official/r1/wide_deep/census_main.py delete mode 100644 official/r1/wide_deep/census_test.csv delete mode 100644 official/r1/wide_deep/census_test.py delete mode 100644 official/r1/wide_deep/movielens_dataset.py delete mode 100644 official/r1/wide_deep/movielens_main.py delete 
mode 100644 official/r1/wide_deep/wide_deep_run_loop.py diff --git a/official/r1/README.md b/official/r1/README.md deleted file mode 100644 index 72514177d..000000000 --- a/official/r1/README.md +++ /dev/null @@ -1,23 +0,0 @@ -![No Maintenance Intended](https://img.shields.io/badge/No%20Maintenance%20Intended-%E2%9C%95-red.svg) -![TensorFlow Requirement: 1.x](https://img.shields.io/badge/TensorFlow%20Requirement-1.x-brightgreen) -![TensorFlow 2 Not Supported](https://img.shields.io/badge/TensorFlow%202%20Not%20Supported-%E2%9C%95-red.svg) - -# Legacy Models - -The **r1** folder contains legacy model implementations developed -using TensorFlow 1.x. - -**Note: We will remove this r1 folder from the master branch in June, 2020.** - -After removal, you will still be able to access legacy models -in the previous releases. -(e.g., [v2.1.0](https://github.com/tensorflow/models/releases/tag/v2.1.0)) - -| Model | Description | Reference | -| ----- | ----------- | --------- | -| [Gradient Boosted Trees](boosted_trees) | A gradient boosted trees model to classify higgs boson process from HIGGS dataset | [Link](https://en.wikipedia.org/wiki/Gradient_boosting) | -| [MNIST](mnist) | A basic model to classify digits from the MNIST dataset | [Link](http://yann.lecun.com/exdb/mnist/) | -| [NCF](ncf) | NCF Estimator implementation | [arXiv:1708.05031](https://arxiv.org/abs/1708.05031) | -| [ResNet](resnet) | A deep residual network for image recognition | [arXiv:1512.03385](https://arxiv.org/abs/1512.03385) | -| [Transformer](transformer) | A transformer model to translate the WMT English to German dataset | [arXiv:1706.03762](https://arxiv.org/abs/1706.03762) | -| [Wide & Deep Learning](wide_deep) | A model that combines a wide linear model and deep neural network for recommender systems | [arXiv:1606.07792](https://arxiv.org/abs/1606.07792) | diff --git a/official/r1/__init__.py b/official/r1/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/official/r1/boosted_trees/README.md b/official/r1/boosted_trees/README.md deleted file mode 100644 index 56c40aa50..000000000 --- a/official/r1/boosted_trees/README.md +++ /dev/null @@ -1,117 +0,0 @@ -![No Maintenance Intended](https://img.shields.io/badge/No%20Maintenance%20Intended-%E2%9C%95-red.svg) -![TensorFlow Requirement: 1.x](https://img.shields.io/badge/TensorFlow%20Requirement-1.x-brightgreen) -![TensorFlow 2 Not Supported](https://img.shields.io/badge/TensorFlow%202%20Not%20Supported-%E2%9C%95-red.svg) - -# Classifying Higgs boson processes in the HIGGS Data Set - -## Overview -The [HIGGS Data Set](https://archive.ics.uci.edu/ml/datasets/HIGGS) contains 11 million samples with 28 features, and is for the classification problem to distinguish between a signal process which produces Higgs bosons and a background process which does not. - -We use Gradient Boosted Trees algorithm to distinguish the two classes. - ---- - -The code sample uses the high level `tf.estimator.Estimator` and `tf.data.Dataset`. These APIs are great for fast iteration and quickly adapting models to your own datasets without major code overhauls. It allows you to move from single-worker training to distributed training, and makes it easy to export model binaries for prediction. Here, for further simplicity and faster execution, we use a utility function `tf.contrib.estimator.boosted_trees_classifier_train_in_memory`. This utility function is especially effective when the input is provided as in-memory data sets like numpy arrays. 
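To make the in-memory pattern above concrete, here is a hedged, self-contained sketch (TF 1.x) of what this removed README describes; the random `features_np`/`label_np` arrays are stand-ins for the processed HIGGS data and are not part of the original script:

```
# Illustrative sketch only: in-memory gradient boosted trees training.
import numpy as np
import tensorflow.compat.v1 as tf
from tensorflow.contrib import estimator as contrib_estimator

# Stand-in data; the real script loads these from the downloaded HIGGS file.
features_np = np.random.rand(1000, 28).astype(np.float32)
label_np = np.random.randint(0, 2, size=(1000, 1)).astype(np.float32)

feature_names = ["feature_%02d" % (i + 1) for i in range(28)]
columns = [
    tf.feature_column.bucketized_column(
        tf.feature_column.numeric_column(name, dtype=tf.float32),
        boundaries=np.unique(
            np.percentile(features_np[:, i], range(0, 100))).tolist())
    for i, name in enumerate(feature_names)
]


def train_input_fn():
  # The in-memory utility needs the whole dataset as a single batch, hence
  # Dataset.from_tensors() instead of batched slices.
  features = {name: tf.constant(features_np[:, i:i + 1])
              for i, name in enumerate(feature_names)}
  return tf.data.Dataset.zip((tf.data.Dataset.from_tensors(features),
                              tf.data.Dataset.from_tensors(label_np)))


classifier = contrib_estimator.boosted_trees_classifier_train_in_memory(
    train_input_fn, columns, n_trees=100, max_depth=6, learning_rate=0.1)
```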
- -An input function for the `Estimator` typically uses `tf.data.Dataset` API, which can handle various data control like streaming, batching, transform and shuffling. However `boosted_trees_classifier_train_in_memory()` utility function requires that the entire data is provided as a single batch (i.e. without using `batch()` API). Thus in this practice, simply `Dataset.from_tensors()` is used to convert numpy arrays into structured tensors, and `Dataset.zip()` is used to put features and label together. -For further references of `Dataset`, [Read more here](https://www.tensorflow.org/guide/datasets). - -## Running the code -First make sure you've [added the models folder to your Python path](/official/#running-the-models); otherwise you may encounter an error like `ImportError: No module named official.boosted_trees`. - -### Setup -The [HIGGS Data Set](https://archive.ics.uci.edu/ml/datasets/HIGGS) that this sample uses for training is hosted by the [UC Irvine Machine Learning Repository](https://archive.ics.uci.edu/ml/datasets/). We have provided a script that downloads and cleans the necessary files. - -``` -python data_download.py -``` - -This will download a file and store the processed file under the directory designated by `--data_dir` (defaults to `/tmp/higgs_data/`). To change the target directory, set the `--data_dir` flag. The directory could be network storages that Tensorflow supports (like Google Cloud Storage, `gs:////`). -The file downloaded to the local temporary folder is about 2.8 GB, and the processed file is about 0.8 GB, so there should be enough storage to handle them. - - -### Training - -This example uses about 3 GB of RAM during training. -You can run the code locally as follows: - -``` -python train_higgs.py -``` - -The model is by default saved to `/tmp/higgs_model`, which can be changed using the `--model_dir` flag. -Note that the model_dir is cleaned up before every time training starts. - -Model parameters can be adjusted by flags, like `--n_trees`, `--max_depth`, `--learning_rate` and so on. Check out the code for details. - -The final accuracy will be around 74% and loss will be around 0.516 over the eval set, when trained with the default parameters. - -By default, the first 1 million examples among 11 millions are used for training, and the last 1 million examples are used for evaluation. -The training/evaluation data can be selected as index ranges by flags `--train_start`, `--train_count`, `--eval_start`, `--eval_count`, etc. - -### TensorBoard - -Run TensorBoard to inspect the details about the graph and training progression. - -``` -tensorboard --logdir=/tmp/higgs_model # set logdir as --model_dir set during training. -``` - -## Inference with SavedModel -You can export the model into Tensorflow [SavedModel](https://www.tensorflow.org/guide/saved_model) format by using the argument `--export_dir`: - -``` -python train_higgs.py --export_dir /tmp/higgs_boosted_trees_saved_model -``` - -After the model finishes training, use [`saved_model_cli`](https://www.tensorflow.org/guide/saved_model#cli_to_inspect_and_execute_savedmodel) to inspect and execute the SavedModel. - -Try the following commands to inspect the SavedModel: - -**Replace `${TIMESTAMP}` with the folder produced (e.g. 1524249124)** -``` -# List possible tag_sets. Only one metagraph is saved, so there will be one option. -saved_model_cli show --dir /tmp/higgs_boosted_trees_saved_model/${TIMESTAMP}/ - -# Show SignatureDefs for tag_set=serve. SignatureDefs define the outputs to show. 
-saved_model_cli show --dir /tmp/higgs_boosted_trees_saved_model/${TIMESTAMP}/ \ - --tag_set serve --all -``` - -### Inference -Let's use the model to predict the income group of two examples. -Note that this model exports SavedModel with the custom parsing module that accepts csv lines as features. (Each line is an example with 28 columns; be careful to not add a label column, unlike in the training data.) - -``` -saved_model_cli run --dir /tmp/boosted_trees_higgs_saved_model/${TIMESTAMP}/ \ - --tag_set serve --signature_def="predict" \ - --input_exprs='inputs=["0.869293,-0.635082,0.225690,0.327470,-0.689993,0.754202,-0.248573,-1.092064,0.0,1.374992,-0.653674,0.930349,1.107436,1.138904,-1.578198,-1.046985,0.0,0.657930,-0.010455,-0.045767,3.101961,1.353760,0.979563,0.978076,0.920005,0.721657,0.988751,0.876678", "1.595839,-0.607811,0.007075,1.818450,-0.111906,0.847550,-0.566437,1.581239,2.173076,0.755421,0.643110,1.426367,0.0,0.921661,-1.190432,-1.615589,0.0,0.651114,-0.654227,-1.274345,3.101961,0.823761,0.938191,0.971758,0.789176,0.430553,0.961357,0.957818"]' -``` - -This will print out the predicted classes and class probabilities. Something like: - -``` -Result for output key class_ids: -[[1] - [0]] -Result for output key classes: -[['1'] - ['0']] -Result for output key logistic: -[[0.6440273 ] - [0.10902369]] -Result for output key logits: -[[ 0.59288704] - [-2.1007526 ]] -Result for output key probabilities: -[[0.3559727 0.6440273] - [0.8909763 0.1090237]] -``` - -Please note that "predict" signature_def gives out different (more detailed) results than "classification" or "serving_default". - -## Additional Links - -If you are interested in distributed training, take a look at [Distributed TensorFlow](https://www.tensorflow.org/deploy/distributed). - -You can also [train models on Cloud ML Engine](https://cloud.google.com/ml-engine/docs/getting-started-training-prediction), which provides [hyperparameter tuning](https://cloud.google.com/ml-engine/docs/getting-started-training-prediction#hyperparameter_tuning) to maximize your model's results and enables [deploying your model for prediction](https://cloud.google.com/ml-engine/docs/getting-started-training-prediction#deploy_a_model_to_support_prediction). diff --git a/official/r1/boosted_trees/__init__.py b/official/r1/boosted_trees/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/official/r1/boosted_trees/data_download.py b/official/r1/boosted_trees/data_download.py deleted file mode 100644 index 1b6fc050d..000000000 --- a/official/r1/boosted_trees/data_download.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Downloads the UCI HIGGS Dataset and prepares train data. 
- -The details on the dataset are in https://archive.ics.uci.edu/ml/datasets/HIGGS - -It takes a while as it needs to download 2.8 GB over the network, process, then -store it into the specified location as a compressed numpy file. - -Usage: -$ python data_download.py --data_dir=/tmp/higgs_data -""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import gzip -import os -import tempfile - -# pylint: disable=g-bad-import-order -import numpy as np -import pandas as pd -from six.moves import urllib -from absl import app as absl_app -from absl import flags -import tensorflow as tf - -from official.utils.flags import core as flags_core - -URL_ROOT = "https://archive.ics.uci.edu/ml/machine-learning-databases/00280" -INPUT_FILE = "HIGGS.csv.gz" -NPZ_FILE = "HIGGS.csv.gz.npz" # numpy compressed file to contain "data" array. - - -def _download_higgs_data_and_save_npz(data_dir): - """Download higgs data and store as a numpy compressed file.""" - input_url = URL_ROOT + "/" + INPUT_FILE - np_filename = os.path.join(data_dir, NPZ_FILE) - if tf.gfile.Exists(np_filename): - raise ValueError("data_dir already has the processed data file: {}".format( - np_filename)) - if not tf.gfile.Exists(data_dir): - tf.gfile.MkDir(data_dir) - # 2.8 GB to download. - try: - tf.logging.info("Data downloading...") - temp_filename, _ = urllib.request.urlretrieve(input_url) - # Reading and parsing 11 million csv lines takes 2~3 minutes. - tf.logging.info("Data processing... taking multiple minutes...") - with gzip.open(temp_filename, "rb") as csv_file: - data = pd.read_csv( - csv_file, - dtype=np.float32, - names=["c%02d" % i for i in range(29)] # label + 28 features. - ).as_matrix() - finally: - tf.gfile.Remove(temp_filename) - - # Writing to temporary location then copy to the data_dir (0.8 GB). - f = tempfile.NamedTemporaryFile() - np.savez_compressed(f, data=data) - tf.gfile.Copy(f.name, np_filename) - tf.logging.info("Data saved to: {}".format(np_filename)) - - -def main(unused_argv): - if not tf.gfile.Exists(FLAGS.data_dir): - tf.gfile.MkDir(FLAGS.data_dir) - _download_higgs_data_and_save_npz(FLAGS.data_dir) - - -def define_data_download_flags(): - """Add flags specifying data download arguments.""" - flags.DEFINE_string( - name="data_dir", default="/tmp/higgs_data", - help=flags_core.help_wrap( - "Directory to download higgs dataset and store training/eval data.")) - - -if __name__ == "__main__": - tf.logging.set_verbosity(tf.logging.INFO) - define_data_download_flags() - FLAGS = flags.FLAGS - absl_app.run(main) diff --git a/official/r1/boosted_trees/train_higgs.py b/official/r1/boosted_trees/train_higgs.py deleted file mode 100644 index 5f3f2547e..000000000 --- a/official/r1/boosted_trees/train_higgs.py +++ /dev/null @@ -1,295 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================== -r"""A script that builds boosted trees over higgs data. - -If you haven't, please run data_download.py beforehand to prepare the data. - -For some more details on this example, please refer to README.md as well. - -Note that the model_dir is cleaned up before starting the training. - -Usage: -$ python train_higgs.py --n_trees=100 --max_depth=6 --learning_rate=0.1 \ - --model_dir=/tmp/higgs_model - -Note that BoostedTreesClassifier is available since Tensorflow 1.8.0. -So you need to install recent enough version of Tensorflow to use this example. - -The training data is by default the first million examples out of 11M examples, -and eval data is by default the last million examples. -They are controlled by --train_start, --train_count, --eval_start, --eval_count. -e.g. to train over the first 10 million examples instead of 1 million: -$ python train_higgs.py --n_trees=100 --max_depth=6 --learning_rate=0.1 \ - --model_dir=/tmp/higgs_model --train_count=10000000 - -Training history and metrics can be inspected using tensorboard. -Set --logdir as the --model_dir set by flag when training -(or the default /tmp/higgs_model). -$ tensorboard --logdir=/tmp/higgs_model -""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import os - -from absl import app as absl_app -from absl import flags -import numpy as np -import tensorflow.compat.v1 as tf - -from official.r1.utils.logs import logger -from official.utils.flags import core as flags_core -from official.utils.flags._conventions import help_wrap - -NPZ_FILE = "HIGGS.csv.gz.npz" # numpy compressed file containing "data" array - - -def read_higgs_data(data_dir, train_start, train_count, eval_start, eval_count): - """Reads higgs data from csv and returns train and eval data. - - Args: - data_dir: A string, the directory of higgs dataset. - train_start: An integer, the start index of train examples within the data. - train_count: An integer, the number of train examples within the data. - eval_start: An integer, the start index of eval examples within the data. - eval_count: An integer, the number of eval examples within the data. - - Returns: - Numpy array of train data and eval data. - """ - npz_filename = os.path.join(data_dir, NPZ_FILE) - try: - # gfile allows numpy to read data from network data sources as well. - with tf.gfile.Open(npz_filename, "rb") as npz_file: - with np.load(npz_file) as npz: - data = npz["data"] - except tf.errors.NotFoundError as e: - raise RuntimeError( - "Error loading data; use data_download.py to prepare the data.\n{}: {}" - .format(type(e).__name__, e)) - return (data[train_start:train_start+train_count], - data[eval_start:eval_start+eval_count]) - - -# This showcases how to make input_fn when the input data is available in the -# form of numpy arrays. -def make_inputs_from_np_arrays(features_np, label_np): - """Makes and returns input_fn and feature_columns from numpy arrays. - - The generated input_fn will return tf.data.Dataset of feature dictionary and a - label, and feature_columns will consist of the list of - tf.feature_column.BucketizedColumn. - - Note, for in-memory training, tf.data.Dataset should contain the whole data - as a single tensor. Don't use batch. - - Args: - features_np: A numpy ndarray (shape=[batch_size, num_features]) for - float32 features. - label_np: A numpy ndarray (shape=[batch_size, 1]) for labels. 
- - Returns: - input_fn: A function returning a Dataset of feature dict and label. - feature_names: A list of feature names. - feature_column: A list of tf.feature_column.BucketizedColumn. - """ - num_features = features_np.shape[1] - features_np_list = np.split(features_np, num_features, axis=1) - # 1-based feature names. - feature_names = ["feature_%02d" % (i + 1) for i in range(num_features)] - - # Create source feature_columns and bucketized_columns. - def get_bucket_boundaries(feature): - """Returns bucket boundaries for feature by percentiles.""" - return np.unique(np.percentile(feature, range(0, 100))).tolist() - source_columns = [ - tf.feature_column.numeric_column( - feature_name, dtype=tf.float32, - # Although higgs data have no missing values, in general, default - # could be set as 0 or some reasonable value for missing values. - default_value=0.0) - for feature_name in feature_names - ] - bucketized_columns = [ - tf.feature_column.bucketized_column( - source_columns[i], - boundaries=get_bucket_boundaries(features_np_list[i])) - for i in range(num_features) - ] - - # Make an input_fn that extracts source features. - def input_fn(): - """Returns features as a dictionary of numpy arrays, and a label.""" - features = { - feature_name: tf.constant(features_np_list[i]) - for i, feature_name in enumerate(feature_names) - } - return tf.data.Dataset.zip((tf.data.Dataset.from_tensors(features), - tf.data.Dataset.from_tensors(label_np),)) - - return input_fn, feature_names, bucketized_columns - - -def make_eval_inputs_from_np_arrays(features_np, label_np): - """Makes eval input as streaming batches.""" - num_features = features_np.shape[1] - features_np_list = np.split(features_np, num_features, axis=1) - # 1-based feature names. - feature_names = ["feature_%02d" % (i + 1) for i in range(num_features)] - - def input_fn(): - features = { - feature_name: tf.constant(features_np_list[i]) - for i, feature_name in enumerate(feature_names) - } - return tf.data.Dataset.zip(( - tf.data.Dataset.from_tensor_slices(features), - tf.data.Dataset.from_tensor_slices(label_np),)).batch(1000) - - return input_fn - - -def _make_csv_serving_input_receiver_fn(column_names, column_defaults): - """Returns serving_input_receiver_fn for csv. - - The input arguments are relevant to `tf.decode_csv()`. - - Args: - column_names: a list of column names in the order within input csv. - column_defaults: a list of default values with the same size of - column_names. Each entity must be either a list of one scalar, or an - empty list to denote the corresponding column is required. - e.g. [[""], [2.5], []] indicates the third column is required while - the first column must be string and the second must be float/double. - - Returns: - a serving_input_receiver_fn that handles csv for serving. - """ - def serving_input_receiver_fn(): - csv = tf.placeholder(dtype=tf.string, shape=[None], name="csv") - features = dict(zip(column_names, tf.decode_csv(csv, column_defaults))) - receiver_tensors = {"inputs": csv} - return tf.estimator.export.ServingInputReceiver(features, receiver_tensors) - - return serving_input_receiver_fn - - -def train_boosted_trees(flags_obj): - """Train boosted_trees estimator on HIGGS data. - - Args: - flags_obj: An object containing parsed flag values. - """ - # Clean up the model directory if present. 
- if tf.gfile.Exists(flags_obj.model_dir): - tf.gfile.DeleteRecursively(flags_obj.model_dir) - tf.logging.info("## Data loading...") - train_data, eval_data = read_higgs_data( - flags_obj.data_dir, flags_obj.train_start, flags_obj.train_count, - flags_obj.eval_start, flags_obj.eval_count) - tf.logging.info("## Data loaded; train: {}{}, eval: {}{}".format( - train_data.dtype, train_data.shape, eval_data.dtype, eval_data.shape)) - # Data consists of one label column followed by 28 feature columns. - train_input_fn, feature_names, feature_columns = make_inputs_from_np_arrays( - features_np=train_data[:, 1:], label_np=train_data[:, 0:1]) - eval_input_fn = make_eval_inputs_from_np_arrays( - features_np=eval_data[:, 1:], label_np=eval_data[:, 0:1]) - tf.logging.info("## Features prepared. Training starts...") - - # Create benchmark logger to log info about the training and metric values - run_params = { - "train_start": flags_obj.train_start, - "train_count": flags_obj.train_count, - "eval_start": flags_obj.eval_start, - "eval_count": flags_obj.eval_count, - "n_trees": flags_obj.n_trees, - "max_depth": flags_obj.max_depth, - } - benchmark_logger = logger.config_benchmark_logger(flags_obj) - benchmark_logger.log_run_info( - model_name="boosted_trees", - dataset_name="higgs", - run_params=run_params, - test_id=flags_obj.benchmark_test_id) - - # Though BoostedTreesClassifier is under tf.estimator, faster in-memory - # training is yet provided as a contrib library. - from tensorflow.contrib import estimator as contrib_estimator # pylint: disable=g-import-not-at-top - classifier = contrib_estimator.boosted_trees_classifier_train_in_memory( - train_input_fn, - feature_columns, - model_dir=flags_obj.model_dir or None, - n_trees=flags_obj.n_trees, - max_depth=flags_obj.max_depth, - learning_rate=flags_obj.learning_rate) - - # Evaluation. - eval_results = classifier.evaluate(eval_input_fn) - # Benchmark the evaluation results - benchmark_logger.log_evaluation_result(eval_results) - - # Exporting the savedmodel with csv parsing. - if flags_obj.export_dir is not None: - classifier.export_savedmodel( - flags_obj.export_dir, - _make_csv_serving_input_receiver_fn( - column_names=feature_names, - # columns are all floats. 
- column_defaults=[[0.0]] * len(feature_names)), - strip_default_attrs=True) - - -def main(_): - train_boosted_trees(flags.FLAGS) - - -def define_train_higgs_flags(): - """Add tree related flags as well as training/eval configuration.""" - flags_core.define_base(clean=False, stop_threshold=False, batch_size=False, - num_gpu=False, export_dir=True) - flags_core.define_benchmark() - flags.adopt_module_key_flags(flags_core) - - flags.DEFINE_integer( - name="train_start", default=0, - help=help_wrap("Start index of train examples within the data.")) - flags.DEFINE_integer( - name="train_count", default=1000000, - help=help_wrap("Number of train examples within the data.")) - flags.DEFINE_integer( - name="eval_start", default=10000000, - help=help_wrap("Start index of eval examples within the data.")) - flags.DEFINE_integer( - name="eval_count", default=1000000, - help=help_wrap("Number of eval examples within the data.")) - - flags.DEFINE_integer( - "n_trees", default=100, help=help_wrap("Number of trees to build.")) - flags.DEFINE_integer( - "max_depth", default=6, help=help_wrap("Maximum depths of each tree.")) - flags.DEFINE_float( - "learning_rate", default=0.1, - help=help_wrap("The learning rate.")) - - flags_core.set_defaults(data_dir="/tmp/higgs_data", - model_dir="/tmp/higgs_model") - - -if __name__ == "__main__": - # Training progress and eval results are shown as logging.INFO; so enables it. - tf.logging.set_verbosity(tf.logging.INFO) - define_train_higgs_flags() - absl_app.run(main) diff --git a/official/r1/mnist/README.md b/official/r1/mnist/README.md deleted file mode 100644 index 55f352384..000000000 --- a/official/r1/mnist/README.md +++ /dev/null @@ -1,91 +0,0 @@ -![No Maintenance Intended](https://img.shields.io/badge/No%20Maintenance%20Intended-%E2%9C%95-red.svg) -![TensorFlow Requirement: 1.x](https://img.shields.io/badge/TensorFlow%20Requirement-1.x-brightgreen) -![TensorFlow 2 Not Supported](https://img.shields.io/badge/TensorFlow%202%20Not%20Supported-%E2%9C%95-red.svg) - -# MNIST in TensorFlow - -This directory builds a convolutional neural net to classify the [MNIST -dataset](http://yann.lecun.com/exdb/mnist/) using the -[tf.data](https://www.tensorflow.org/api_docs/python/tf/data), -[tf.estimator.Estimator](https://www.tensorflow.org/api_docs/python/tf/estimator/Estimator), -and -[tf.layers](https://www.tensorflow.org/api_docs/python/tf/layers) -APIs. - - -## Setup - -To begin, you'll simply need the latest version of TensorFlow installed. -First make sure you've [added the models folder to your Python path]: - -```shell -export PYTHONPATH="$PYTHONPATH:/path/to/models" -``` - -Otherwise you may encounter an error like `ImportError: No module named official.mnist`. - -Then to train the model, run the following: - -``` -python mnist.py -``` - -The model will begin training and will automatically evaluate itself on the -validation data. - -Illustrative unit tests and benchmarks can be run with: - -``` -python mnist_test.py -python mnist_test.py --benchmarks=. -``` - -## Exporting the model - -You can export the model into Tensorflow [SavedModel](https://www.tensorflow.org/guide/saved_model) format by using the argument `--export_dir`: - -``` -python mnist.py --export_dir /tmp/mnist_saved_model -``` - -The SavedModel will be saved in a timestamped directory under `/tmp/mnist_saved_model/` (e.g. `/tmp/mnist_saved_model/1513630966/`). 
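The exported model can also be queried directly from Python rather than through `saved_model_cli`; a hedged sketch, assuming a TF 1.x environment where `tf.contrib.predictor` is available and using the illustrative timestamp above:

```
# Sketch only: load the exported SavedModel and classify the two bundled
# example digits. The timestamped path below is illustrative.
import numpy as np
import tensorflow as tf  # TensorFlow 1.x

export_dir = "/tmp/mnist_saved_model/1513630966"
examples = np.load("examples.npy")  # two flattened digits, values in [0, 1]

predict_fn = tf.contrib.predictor.from_saved_model(
    export_dir, signature_def_key="classify")
outputs = predict_fn({"image": examples})
print(outputs["classes"])        # e.g. [5 3]
print(outputs["probabilities"])  # per-class probabilities
```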
- -**Getting predictions with SavedModel** -Use [`saved_model_cli`](https://www.tensorflow.org/guide/saved_model#cli_to_inspect_and_execute_savedmodel) to inspect and execute the SavedModel. - -``` -saved_model_cli run --dir /tmp/mnist_saved_model/TIMESTAMP --tag_set serve --signature_def classify --inputs image=examples.npy -``` - -`examples.npy` contains the data from `example5.png` and `example3.png` in a numpy array, in that order. The array values are normalized to values between 0 and 1. - -The output should look similar to below: -``` -Result for output key classes: -[5 3] -Result for output key probabilities: -[[ 1.53558474e-07 1.95694142e-13 1.31193523e-09 5.47467265e-03 - 5.85711526e-22 9.94520664e-01 3.48423509e-06 2.65365645e-17 - 9.78631419e-07 3.15522470e-08] - [ 1.22413359e-04 5.87615965e-08 1.72251271e-06 9.39960718e-01 - 3.30306928e-11 2.87386645e-02 2.82353517e-02 8.21146413e-18 - 2.52568233e-03 4.15460236e-04]] -``` - -## Experimental: Eager Execution - -[Eager execution](https://research.googleblog.com/2017/10/eager-execution-imperative-define-by.html) -(an preview feature in TensorFlow 1.5) is an imperative interface to TensorFlow. -The exact same model defined in `mnist.py` can be trained without creating a -TensorFlow graph using: - -``` -python mnist_eager.py -``` - -## Experimental: TPU Acceleration - -`mnist.py` (and `mnist_eager.py`) demonstrate training a neural network to -classify digits on CPUs and GPUs. `mnist_tpu.py` can be used to train the -same model using TPUs for hardware acceleration. More information in -the [tensorflow/tpu](https://github.com/tensorflow/tpu) repository. diff --git a/official/r1/mnist/__init__.py b/official/r1/mnist/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/official/r1/mnist/dataset.py b/official/r1/mnist/dataset.py deleted file mode 100644 index 2bdd155d9..000000000 --- a/official/r1/mnist/dataset.py +++ /dev/null @@ -1,117 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""tf.data.Dataset interface to the MNIST dataset.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import gzip -import os -import shutil -import tempfile - -import numpy as np -from six.moves import urllib -import tensorflow as tf - - -def read32(bytestream): - """Read 4 bytes from bytestream as an unsigned 32-bit integer.""" - dt = np.dtype(np.uint32).newbyteorder('>') - return np.frombuffer(bytestream.read(4), dtype=dt)[0] - - -def check_image_file_header(filename): - """Validate that filename corresponds to images for the MNIST dataset.""" - with tf.io.gfile.GFile(filename, 'rb') as f: - magic = read32(f) - read32(f) # num_images, unused - rows = read32(f) - cols = read32(f) - if magic != 2051: - raise ValueError('Invalid magic number %d in MNIST file %s' % (magic, - f.name)) - if rows != 28 or cols != 28: - raise ValueError( - 'Invalid MNIST file %s: Expected 28x28 images, found %dx%d' % - (f.name, rows, cols)) - - -def check_labels_file_header(filename): - """Validate that filename corresponds to labels for the MNIST dataset.""" - with tf.io.gfile.GFile(filename, 'rb') as f: - magic = read32(f) - read32(f) # num_items, unused - if magic != 2049: - raise ValueError('Invalid magic number %d in MNIST file %s' % (magic, - f.name)) - - -def download(directory, filename): - """Download (and unzip) a file from the MNIST dataset if not already done.""" - filepath = os.path.join(directory, filename) - if tf.io.gfile.exists(filepath): - return filepath - if not tf.io.gfile.exists(directory): - tf.io.gfile.makedirs(directory) - # CVDF mirror of http://yann.lecun.com/exdb/mnist/ - url = 'https://storage.googleapis.com/cvdf-datasets/mnist/' + filename + '.gz' - _, zipped_filepath = tempfile.mkstemp(suffix='.gz') - print('Downloading %s to %s' % (url, zipped_filepath)) - urllib.request.urlretrieve(url, zipped_filepath) - with gzip.open(zipped_filepath, 'rb') as f_in, \ - tf.io.gfile.GFile(filepath, 'wb') as f_out: - shutil.copyfileobj(f_in, f_out) - os.remove(zipped_filepath) - return filepath - - -def dataset(directory, images_file, labels_file): - """Download and parse MNIST dataset.""" - - images_file = download(directory, images_file) - labels_file = download(directory, labels_file) - - check_image_file_header(images_file) - check_labels_file_header(labels_file) - - def decode_image(image): - # Normalize from [0, 255] to [0.0, 1.0] - image = tf.io.decode_raw(image, tf.uint8) - image = tf.cast(image, tf.float32) - image = tf.reshape(image, [784]) - return image / 255.0 - - def decode_label(label): - label = tf.io.decode_raw(label, tf.uint8) # tf.string -> [tf.uint8] - label = tf.reshape(label, []) # label is a scalar - return tf.cast(label, tf.int32) - - images = tf.data.FixedLengthRecordDataset( - images_file, 28 * 28, header_bytes=16).map(decode_image) - labels = tf.data.FixedLengthRecordDataset( - labels_file, 1, header_bytes=8).map(decode_label) - return tf.data.Dataset.zip((images, labels)) - - -def train(directory): - """tf.data.Dataset object for MNIST training data.""" - return dataset(directory, 'train-images-idx3-ubyte', - 'train-labels-idx1-ubyte') - - -def test(directory): - """tf.data.Dataset object for MNIST test data.""" - return dataset(directory, 't10k-images-idx3-ubyte', 't10k-labels-idx1-ubyte') diff --git a/official/r1/mnist/example3.png b/official/r1/mnist/example3.png deleted file mode 100644 index bb7f5b8842d2e61a3878dc0aade03773cdc6aceb..0000000000000000000000000000000000000000 GIT binary patch 
literal 0 HcmV?d00001 literal 368 zcmeAS@N?(olHy`uVBq!ia0vp^G9b*s1SJ3FdmIK*jKx9jP7LeL$-D$|SkfJR9T^xl z_H+M9WCij$3p^r=85m^SL71`s>Bm%{23AiO#}E(iw^KF>HXHD`#u_oHJ~m)dRgDe+ zvXTv?RDYKLI?1WhqIh!YNo@k~B{4m9;sTcjp4dZoCQlbMD2eXs-TYOE zQKs5uXQD*Fol@E3xAz$W)s|%>UpDEfO7VPNIcqP_~7i_ z;OnR7Jak&jlAC^_Ad+jvndx4OABP3Z)Nl3U+K{>m}N44$rj JF6*2UngExClBNIv diff --git a/official/r1/mnist/example5.png b/official/r1/mnist/example5.png deleted file mode 100644 index 68496bcce524dda1b51eb5197ba2653ce7ea9475..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 367 zcmeAS@N?(olHy`uVBq!ia0vp^G9b*s1SJ3FdmIK*jKx9jP7LeL$-D$|SkfJR9T^xl z_H+M9WCij$3p^r=85m^SL71`s>Bm%{1{O~j#}E(iw^I~_nhgY8Cwiz%TC6b9qp#r! z8;FpQ;p}v|WdCG(V@vELKWovq!ahP~`IFwLUHrxP?@~fr;)%~P@!w?kEctA+J$H6$ zWQjuGk`*-mjNm6`2Ym;6G~x67-AWAf=ucUBAa zF4E``i8gE&n{~y&S2)`Lfoafzg~s25#8Wy?m3s#Idrp#AT=MtulCVWfUdtgTe~ HDWM4fUe}C_ diff --git a/official/r1/mnist/examples.npy b/official/r1/mnist/examples.npy deleted file mode 100644 index 85d78b1b6dadb1df44128ca173426aff9866c2c0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 12624 zcmeHMy>8S%5H>AU1Sp9Rg*y}Js#Fm|a+eN7LqUa3dUF?)v*?XJ>XU&+p#9bML{WXft}2Y>g(5 z#>uTXxxKxS4B}*acl>lbeDZL2yfqU2o5P*SDDR&<9`23u_RaM{yuKm+)&}u@EaY+| zB}+poG!XOKI(_Fsy z?*D%O67q$OzloFDx3VncZSTAYQ7jl|xg#*e;uY`%@ii%L+V3I7ysKA)ysqg7cH8&< zncttD3i-;{yB5!(9kYZV^RLwXipdLY|9BxTkR86L*{5#r{_x}a7gzEJvLL&DxxB7R zdCxz;5`g{tT$~s;ue_-ApPHYUKVIPGmfZUZtyh1#$_xDU_*=){)>HZo$My@nLMb%R z|M>~~5cVP1;s1LM_4{S@yDs7;-uvbUxc4yP6-B=Aos}2(p(LL8ox>fww&icQht0;{ z9e!=U&x6 - # can then be used to see the recorded summaries. - train_dir = os.path.join(flags_obj.output_dir, 'train') - test_dir = os.path.join(flags_obj.output_dir, 'eval') - tf.gfile.MakeDirs(flags_obj.output_dir) - else: - train_dir = None - test_dir = None - summary_writer = tf.compat.v2.summary.create_file_writer( - train_dir, flush_millis=10000) - test_summary_writer = tf.compat.v2.summary.create_file_writer( - test_dir, flush_millis=10000, name='test') - - # Create and restore checkpoint (if one exists on the path) - checkpoint_prefix = os.path.join(flags_obj.model_dir, 'ckpt') - step_counter = tf.train.get_or_create_global_step() - checkpoint = tf.train.Checkpoint( - model=model, optimizer=optimizer, step_counter=step_counter) - # Restore variables on creation if a checkpoint exists. - checkpoint.restore(tf.train.latest_checkpoint(flags_obj.model_dir)) - - # Train and evaluate for a set number of epochs. 
- with tf.device(device): - for _ in range(flags_obj.train_epochs): - start = time.time() - with summary_writer.as_default(): - train(model, optimizer, train_ds, step_counter, - flags_obj.log_interval) - end = time.time() - print('\nTrain time for epoch #%d (%d total steps): %f' % - (checkpoint.save_counter.numpy() + 1, - step_counter.numpy(), - end - start)) - with test_summary_writer.as_default(): - test(model, test_ds) - checkpoint.save(checkpoint_prefix) - - -def define_mnist_eager_flags(): - """Defined flags and defaults for MNIST in eager mode.""" - flags_core.define_base(clean=True, train_epochs=True, export_dir=True, - distribution_strategy=True) - flags_core.define_image() - flags.adopt_module_key_flags(flags_core) - - flags.DEFINE_integer( - name='log_interval', short_name='li', default=10, - help=flags_core.help_wrap('batches between logging training status')) - - flags.DEFINE_string( - name='output_dir', short_name='od', default=None, - help=flags_core.help_wrap('Directory to write TensorBoard summaries')) - - flags.DEFINE_float(name='learning_rate', short_name='lr', default=0.01, - help=flags_core.help_wrap('Learning rate.')) - - flags.DEFINE_float(name='momentum', short_name='m', default=0.5, - help=flags_core.help_wrap('SGD momentum.')) - - flags.DEFINE_bool(name='no_gpu', short_name='nogpu', default=False, - help=flags_core.help_wrap( - 'disables GPU usage even if a GPU is available')) - - flags_core.set_defaults( - data_dir='/tmp/tensorflow/mnist/input_data', - model_dir='/tmp/tensorflow/mnist/checkpoints/', - batch_size=100, - train_epochs=10, - ) - - -def main(_): - run_mnist_eager(flags.FLAGS) - - -if __name__ == '__main__': - define_mnist_eager_flags() - absl_app.run(main=main) diff --git a/official/r1/mnist/mnist_test.py b/official/r1/mnist/mnist_test.py deleted file mode 100644 index 87e057123..000000000 --- a/official/r1/mnist/mnist_test.py +++ /dev/null @@ -1,140 +0,0 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import time - -import tensorflow.compat.v1 as tf # pylint: disable=g-bad-import-order -from absl import logging -from official.r1.mnist import mnist - -BATCH_SIZE = 100 - - -def dummy_input_fn(): - image = tf.random.uniform([BATCH_SIZE, 784]) - labels = tf.random.uniform([BATCH_SIZE, 1], maxval=9, dtype=tf.int32) - return image, labels - - -def make_estimator(): - data_format = 'channels_last' - if tf.test.is_built_with_cuda(): - data_format = 'channels_first' - return tf.estimator.Estimator( - model_fn=mnist.model_fn, params={ - 'data_format': data_format - }) - - -class Tests(tf.test.TestCase): - """Run tests for MNIST model. - - MNIST uses contrib and will not work with TF 2.0. All tests are disabled if - using TF 2.0. 
- """ - - def test_mnist(self): - classifier = make_estimator() - classifier.train(input_fn=dummy_input_fn, steps=2) - eval_results = classifier.evaluate(input_fn=dummy_input_fn, steps=1) - - loss = eval_results['loss'] - global_step = eval_results['global_step'] - accuracy = eval_results['accuracy'] - self.assertEqual(loss.shape, ()) - self.assertEqual(2, global_step) - self.assertEqual(accuracy.shape, ()) - - input_fn = lambda: tf.random.uniform([3, 784]) - predictions_generator = classifier.predict(input_fn) - for _ in range(3): - predictions = next(predictions_generator) - self.assertEqual(predictions['probabilities'].shape, (10,)) - self.assertEqual(predictions['classes'].shape, ()) - - def mnist_model_fn_helper(self, mode, multi_gpu=False): - features, labels = dummy_input_fn() - image_count = features.shape[0] - spec = mnist.model_fn(features, labels, mode, { - 'data_format': 'channels_last', - 'multi_gpu': multi_gpu - }) - - if mode == tf.estimator.ModeKeys.PREDICT: - predictions = spec.predictions - self.assertAllEqual(predictions['probabilities'].shape, (image_count, 10)) - self.assertEqual(predictions['probabilities'].dtype, tf.float32) - self.assertAllEqual(predictions['classes'].shape, (image_count,)) - self.assertEqual(predictions['classes'].dtype, tf.int64) - - if mode != tf.estimator.ModeKeys.PREDICT: - loss = spec.loss - self.assertAllEqual(loss.shape, ()) - self.assertEqual(loss.dtype, tf.float32) - - if mode == tf.estimator.ModeKeys.EVAL: - eval_metric_ops = spec.eval_metric_ops - self.assertAllEqual(eval_metric_ops['accuracy'][0].shape, ()) - self.assertAllEqual(eval_metric_ops['accuracy'][1].shape, ()) - self.assertEqual(eval_metric_ops['accuracy'][0].dtype, tf.float32) - self.assertEqual(eval_metric_ops['accuracy'][1].dtype, tf.float32) - - def test_mnist_model_fn_train_mode(self): - self.mnist_model_fn_helper(tf.estimator.ModeKeys.TRAIN) - - def test_mnist_model_fn_train_mode_multi_gpu(self): - self.mnist_model_fn_helper(tf.estimator.ModeKeys.TRAIN, multi_gpu=True) - - def test_mnist_model_fn_eval_mode(self): - self.mnist_model_fn_helper(tf.estimator.ModeKeys.EVAL) - - def test_mnist_model_fn_predict_mode(self): - self.mnist_model_fn_helper(tf.estimator.ModeKeys.PREDICT) - - -class Benchmarks(tf.test.Benchmark): - """Simple speed benchmarking for MNIST.""" - - def benchmark_train_step_time(self): - classifier = make_estimator() - # Run one step to warmup any use of the GPU. - classifier.train(input_fn=dummy_input_fn, steps=1) - - have_gpu = tf.test.is_gpu_available() - num_steps = 1000 if have_gpu else 100 - name = 'train_step_time_%s' % ('gpu' if have_gpu else 'cpu') - - start = time.time() - classifier.train(input_fn=dummy_input_fn, steps=num_steps) - end = time.time() - - wall_time = (end - start) / num_steps - self.report_benchmark( - iters=num_steps, - wall_time=wall_time, - name=name, - extras={ - 'examples_per_sec': BATCH_SIZE / wall_time - }) - - -if __name__ == '__main__': - logging.set_verbosity(logging.ERROR) - tf.disable_v2_behavior() - tf.test.main() diff --git a/official/r1/mnist/mnist_tpu.py b/official/r1/mnist/mnist_tpu.py deleted file mode 100644 index 4ca62ef6d..000000000 --- a/official/r1/mnist/mnist_tpu.py +++ /dev/null @@ -1,202 +0,0 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""MNIST model training using TPUs. - -This program demonstrates training of the convolutional neural network model -defined in mnist.py on Google Cloud TPUs (https://cloud.google.com/tpu/). - -If you are not interested in TPUs, you should ignore this file. -""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import os -import sys - -# pylint: disable=g-bad-import-order -from absl import app as absl_app # pylint: disable=unused-import -import tensorflow.compat.v1 as tf -# pylint: enable=g-bad-import-order - -# For open source environment, add grandparent directory for import -sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(sys.path[0])))) - -from official.r1.mnist import dataset # pylint: disable=wrong-import-position -from official.r1.mnist import mnist # pylint: disable=wrong-import-position - -# Cloud TPU Cluster Resolver flags -tf.flags.DEFINE_string( - "tpu", default=None, - help="The Cloud TPU to use for training. This should be either the name " - "used when creating the Cloud TPU, or a grpc://ip.address.of.tpu:8470 " - "url.") -tf.flags.DEFINE_string( - "tpu_zone", default=None, - help="[Optional] GCE zone where the Cloud TPU is located in. If not " - "specified, we will attempt to automatically detect the GCE project from " - "metadata.") -tf.flags.DEFINE_string( - "gcp_project", default=None, - help="[Optional] Project name for the Cloud TPU-enabled project. If not " - "specified, we will attempt to automatically detect the GCE project from " - "metadata.") - -# Model specific parameters -tf.flags.DEFINE_string("data_dir", "", - "Path to directory containing the MNIST dataset") -tf.flags.DEFINE_string("model_dir", None, "Estimator model_dir") -tf.flags.DEFINE_integer("batch_size", 1024, - "Mini-batch size for the training. Note that this " - "is the global batch size and not the per-shard batch.") -tf.flags.DEFINE_integer("train_steps", 1000, "Total number of training steps.") -tf.flags.DEFINE_integer("eval_steps", 0, - "Total number of evaluation steps. 
If `0`, evaluation " - "after training is skipped.") -tf.flags.DEFINE_float("learning_rate", 0.05, "Learning rate.") - -tf.flags.DEFINE_bool("use_tpu", True, "Use TPUs rather than plain CPUs") -tf.flags.DEFINE_bool("enable_predict", True, "Do some predictions at the end") -tf.flags.DEFINE_integer("iterations", 50, - "Number of iterations per TPU training loop.") -tf.flags.DEFINE_integer("num_shards", 8, "Number of shards (TPU chips).") - -FLAGS = tf.flags.FLAGS - - -def metric_fn(labels, logits): - accuracy = tf.metrics.accuracy( - labels=labels, predictions=tf.argmax(logits, axis=1)) - return {"accuracy": accuracy} - - -def model_fn(features, labels, mode, params): - """model_fn constructs the ML model used to predict handwritten digits.""" - - del params - image = features - if isinstance(image, dict): - image = features["image"] - - model = mnist.create_model("channels_last") - - if mode == tf.estimator.ModeKeys.PREDICT: - logits = model(image, training=False) - predictions = { - 'class_ids': tf.argmax(logits, axis=1), - 'probabilities': tf.nn.softmax(logits), - } - return tf.estimator.tpu.TPUEstimatorSpec(mode, predictions=predictions) - - logits = model(image, training=(mode == tf.estimator.ModeKeys.TRAIN)) - loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits) - - if mode == tf.estimator.ModeKeys.TRAIN: - learning_rate = tf.train.exponential_decay( - FLAGS.learning_rate, - tf.train.get_global_step(), - decay_steps=100000, - decay_rate=0.96) - optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate) - if FLAGS.use_tpu: - optimizer = tf.tpu.CrossShardOptimizer(optimizer) - return tf.estimator.tpu.TPUEstimatorSpec( - mode=mode, - loss=loss, - train_op=optimizer.minimize(loss, tf.train.get_global_step())) - - if mode == tf.estimator.ModeKeys.EVAL: - return tf.estimator.tpu.TPUEstimatorSpec( - mode=mode, loss=loss, eval_metrics=(metric_fn, [labels, logits])) - - -def train_input_fn(params): - """train_input_fn defines the input pipeline used for training.""" - batch_size = params["batch_size"] - data_dir = params["data_dir"] - # Retrieves the batch size for the current shard. The # of shards is - # computed according to the input pipeline deployment. See - # `tf.estimator.tpu.RunConfig` for details. - ds = dataset.train(data_dir).cache().repeat().shuffle( - buffer_size=50000).batch(batch_size, drop_remainder=True) - return ds - - -def eval_input_fn(params): - batch_size = params["batch_size"] - data_dir = params["data_dir"] - ds = dataset.test(data_dir).batch(batch_size, drop_remainder=True) - return ds - - -def predict_input_fn(params): - batch_size = params["batch_size"] - data_dir = params["data_dir"] - # Take out top 10 samples from test data to make the predictions. - ds = dataset.test(data_dir).take(10).batch(batch_size) - return ds - - -def main(argv): - del argv # Unused. 
- tf.logging.set_verbosity(tf.logging.INFO) - - tpu_cluster_resolver = tf.distribute.cluster_resolver.TPUClusterResolver( - FLAGS.tpu, - zone=FLAGS.tpu_zone, - project=FLAGS.gcp_project - ) - - run_config = tf.estimator.tpu.RunConfig( - cluster=tpu_cluster_resolver, - model_dir=FLAGS.model_dir, - session_config=tf.ConfigProto( - allow_soft_placement=True, log_device_placement=True), - tpu_config=tf.estimator.tpu.TPUConfig(FLAGS.iterations, FLAGS.num_shards), - ) - - estimator = tf.estimator.tpu.TPUEstimator( - model_fn=model_fn, - use_tpu=FLAGS.use_tpu, - train_batch_size=FLAGS.batch_size, - eval_batch_size=FLAGS.batch_size, - predict_batch_size=FLAGS.batch_size, - params={"data_dir": FLAGS.data_dir}, - config=run_config) - # TPUEstimator.train *requires* a max_steps argument. - estimator.train(input_fn=train_input_fn, max_steps=FLAGS.train_steps) - # TPUEstimator.evaluate *requires* a steps argument. - # Note that the number of examples used during evaluation is - # --eval_steps * --batch_size. - # So if you change --batch_size then change --eval_steps too. - if FLAGS.eval_steps: - estimator.evaluate(input_fn=eval_input_fn, steps=FLAGS.eval_steps) - - # Run prediction on top few samples of test data. - if FLAGS.enable_predict: - predictions = estimator.predict(input_fn=predict_input_fn) - - for pred_dict in predictions: - template = ('Prediction is "{}" ({:.1f}%).') - - class_id = pred_dict['class_ids'] - probability = pred_dict['probabilities'][class_id] - - print(template.format(class_id, 100 * probability)) - - -if __name__ == "__main__": - tf.disable_v2_behavior() - absl_app.run(main) diff --git a/official/r1/ncf/README.md b/official/r1/ncf/README.md deleted file mode 100644 index 8156d396d..000000000 --- a/official/r1/ncf/README.md +++ /dev/null @@ -1,7 +0,0 @@ -![No Maintenance Intended](https://img.shields.io/badge/No%20Maintenance%20Intended-%E2%9C%95-red.svg) -![TensorFlow Requirement: 1.x](https://img.shields.io/badge/TensorFlow%20Requirement-1.x-brightgreen) -![TensorFlow 2 Not Supported](https://img.shields.io/badge/TensorFlow%202%20Not%20Supported-%E2%9C%95-red.svg) - -# NCF Estimator implementation - -NCF framework to train and evaluate the NeuMF model diff --git a/official/r1/ncf/ncf_estimator_main.py b/official/r1/ncf/ncf_estimator_main.py deleted file mode 100644 index a40e7b77a..000000000 --- a/official/r1/ncf/ncf_estimator_main.py +++ /dev/null @@ -1,187 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""NCF framework to train and evaluate the NeuMF model. - -The NeuMF model assembles both MF and MLP models under the NCF framework. Check -`neumf_model.py` for more details about the models. 
-""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import contextlib -import heapq -import json -import math -import multiprocessing -import os -import signal - -from absl import app as absl_app -from absl import flags -from absl import logging -import numpy as np -from six.moves import range -import tensorflow as tf -import typing - -from official.r1.utils.logs import hooks_helper -from official.r1.utils.logs import logger -from official.r1.utils.logs import mlperf_helper -from official.recommendation import constants as rconst -from official.recommendation import data_pipeline -from official.recommendation import data_preprocessing -from official.recommendation import movielens -from official.recommendation import ncf_common -from official.recommendation import neumf_model -from official.utils.flags import core as flags_core -from official.utils.misc import distribution_utils -from official.utils.misc import model_helpers - - -FLAGS = flags.FLAGS - - -def construct_estimator(model_dir, params): - """Construct either an Estimator for NCF. - - Args: - model_dir: The model directory for the estimator - params: The params dict for the estimator - - Returns: - An Estimator. - """ - distribution = ncf_common.get_v1_distribution_strategy(params) - run_config = tf.estimator.RunConfig(train_distribute=distribution, - eval_distribute=distribution) - model_fn = neumf_model.neumf_model_fn - estimator = tf.estimator.Estimator(model_fn=model_fn, model_dir=model_dir, - config=run_config, params=params) - return estimator - - -def log_and_get_hooks(eval_batch_size): - """Convenience function for hook and logger creation.""" - # Create hooks that log information about the training and metric values - train_hooks = hooks_helper.get_train_hooks( - FLAGS.hooks, - model_dir=FLAGS.model_dir, - batch_size=FLAGS.batch_size, # for ExamplesPerSecondHook - tensors_to_log={"cross_entropy": "cross_entropy"} - ) - run_params = { - "batch_size": FLAGS.batch_size, - "eval_batch_size": eval_batch_size, - "number_factors": FLAGS.num_factors, - "hr_threshold": FLAGS.hr_threshold, - "train_epochs": FLAGS.train_epochs, - } - benchmark_logger = logger.get_benchmark_logger() - benchmark_logger.log_run_info( - model_name="recommendation", - dataset_name=FLAGS.dataset, - run_params=run_params, - test_id=FLAGS.benchmark_test_id) - - return benchmark_logger, train_hooks - - -def main(_): - with logger.benchmark_context(FLAGS), \ - mlperf_helper.LOGGER(FLAGS.output_ml_perf_compliance_logging): - mlperf_helper.set_ncf_root(os.path.split(os.path.abspath(__file__))[0]) - run_ncf(FLAGS) - - -def run_ncf(_): - """Run NCF training and eval loop.""" - params = ncf_common.parse_flags(FLAGS) - - num_users, num_items, num_train_steps, num_eval_steps, producer = ( - ncf_common.get_inputs(params)) - - params["num_users"], params["num_items"] = num_users, num_items - producer.start() - model_helpers.apply_clean(flags.FLAGS) - - estimator = construct_estimator(model_dir=FLAGS.model_dir, params=params) - - benchmark_logger, train_hooks = log_and_get_hooks(params["eval_batch_size"]) - total_training_cycle = FLAGS.train_epochs // FLAGS.epochs_between_evals - - target_reached = False - mlperf_helper.ncf_print(key=mlperf_helper.TAGS.TRAIN_LOOP) - for cycle_index in range(total_training_cycle): - assert FLAGS.epochs_between_evals == 1 or not mlperf_helper.LOGGER.enabled - logging.info("Starting a training cycle: {}/{}".format( - cycle_index + 1, total_training_cycle)) - - 
mlperf_helper.ncf_print(key=mlperf_helper.TAGS.TRAIN_EPOCH, - value=cycle_index) - - train_input_fn = producer.make_input_fn(is_training=True) - estimator.train(input_fn=train_input_fn, hooks=train_hooks, - steps=num_train_steps) - - logging.info("Beginning evaluation.") - eval_input_fn = producer.make_input_fn(is_training=False) - - mlperf_helper.ncf_print(key=mlperf_helper.TAGS.EVAL_START, - value=cycle_index) - eval_results = estimator.evaluate(eval_input_fn, steps=num_eval_steps) - logging.info("Evaluation complete.") - - hr = float(eval_results[rconst.HR_KEY]) - ndcg = float(eval_results[rconst.NDCG_KEY]) - loss = float(eval_results["loss"]) - - mlperf_helper.ncf_print( - key=mlperf_helper.TAGS.EVAL_TARGET, - value={"epoch": cycle_index, "value": FLAGS.hr_threshold}) - mlperf_helper.ncf_print(key=mlperf_helper.TAGS.EVAL_ACCURACY, - value={"epoch": cycle_index, "value": hr}) - mlperf_helper.ncf_print( - key=mlperf_helper.TAGS.EVAL_HP_NUM_NEG, - value={"epoch": cycle_index, "value": rconst.NUM_EVAL_NEGATIVES}) - - mlperf_helper.ncf_print(key=mlperf_helper.TAGS.EVAL_STOP, value=cycle_index) - - # Benchmark the evaluation results - benchmark_logger.log_evaluation_result(eval_results) - # Log the HR and NDCG results. - logging.info( - "Iteration {}: HR = {:.4f}, NDCG = {:.4f}, Loss = {:.4f}".format( - cycle_index + 1, hr, ndcg, loss)) - - # If some evaluation threshold is met - if model_helpers.past_stop_threshold(FLAGS.hr_threshold, hr): - target_reached = True - break - - mlperf_helper.ncf_print(key=mlperf_helper.TAGS.RUN_STOP, - value={"success": target_reached}) - producer.stop_loop() - producer.join() - - # Clear the session explicitly to avoid session delete error - tf.keras.backend.clear_session() - mlperf_helper.ncf_print(key=mlperf_helper.TAGS.RUN_FINAL) - - -if __name__ == "__main__": - logging.set_verbosity(logging.INFO) - ncf_common.define_ncf_flags() - absl_app.run(main) diff --git a/official/r1/resnet/README.md b/official/r1/resnet/README.md deleted file mode 100644 index 7f70b5016..000000000 --- a/official/r1/resnet/README.md +++ /dev/null @@ -1,156 +0,0 @@ -![No Maintenance Intended](https://img.shields.io/badge/No%20Maintenance%20Intended-%E2%9C%95-red.svg) -![TensorFlow Requirement: 1.x](https://img.shields.io/badge/TensorFlow%20Requirement-1.x-brightgreen) -![TensorFlow 2 Not Supported](https://img.shields.io/badge/TensorFlow%202%20Not%20Supported-%E2%9C%95-red.svg) - -# ResNet in TensorFlow - -Deep residual networks, or ResNets for short, provided the breakthrough idea of -identity mappings in order to enable training of very deep convolutional neural -networks. This folder contains an implementation of ResNet for the ImageNet -dataset written in TensorFlow. - -See the following papers for more background: - -[1] [Deep Residual Learning for Image Recognition](https://arxiv.org/pdf/1512.03385.pdf) by Kaiming He, Xiangyu Zhang, Shaoqing Ren, and Jian Sun, Dec 2015. - -[2] [Identity Mappings in Deep Residual Networks](https://arxiv.org/pdf/1603.05027.pdf) by Kaiming He, Xiangyu Zhang, Shaoqing Ren, and Jian Sun, Jul 2016. - -In code, v1 refers to the ResNet defined in [1] but where a stride 2 is used on -the 3x3 conv rather than the first 1x1 in the bottleneck. This change results -in higher and more stable accuracy with less epochs than the original v1 and has -shown to scale to higher batch sizes with minimal degradation in accuracy. -There is no originating paper. 
The first mention we are aware of was in the
-torch version of [ResNetv1](https://github.com/facebook/fb.resnet.torch). Most
-popular v1 implementations actually use this variant, which we call ResNetv1.5.
-
-In testing we found v1.5 requires ~12% more compute to train and has 6% lower
-throughput for inference compared to ResNetv1. CIFAR-10 ResNet does not use the
-bottleneck and is thus the same for v1 as v1.5.
-
-v2 refers to [2]. The principal difference between the two versions is that v1
-applies batch normalization and activation after convolution, while v2 applies
-batch normalization, then activation, and finally convolution. A schematic
-comparison is presented in Figure 1 (left) of [2].
-
-Please proceed according to which dataset you would like to train/evaluate on:
-
-
-## CIFAR-10
-
-### Setup
-
-You need to have the latest version of TensorFlow installed.
-First, make sure [the models folder is in your Python path](/official/#running-the-models); otherwise you may encounter `ImportError: No module named official.resnet`.
-
-Then, download and extract the CIFAR-10 data from Alex's website, specifying the location with the `--data_dir` flag. Run the following:
-
-```bash
-python cifar10_download_and_extract.py --data_dir
-```
-
-Then, to train the model:
-
-```bash
-python cifar10_main.py --data_dir /cifar-10-batches-bin --model_dir
-```
-
-Use `--data_dir` to specify the location of the CIFAR-10 data used in the previous step. There are more flag options as described in `cifar10_main.py`.
-
-To export a `SavedModel` from the trained checkpoint:
-
-```bash
-python cifar10_main.py --data_dir /cifar-10-batches-bin --model_dir --eval_only --export_dir
-```
-
-Note: The `` must be present. You might want to run `mkdir ` beforehand.
-
-The `SavedModel` can then be [loaded](https://www.tensorflow.org/guide/saved_model#loading_a_savedmodel_in_python) in order to use the ResNet for prediction.
-
-
-## ImageNet
-
-### Setup
-To begin, you will need to download the ImageNet dataset and convert it to
-TFRecord format. The following [script](https://github.com/tensorflow/tpu/blob/master/tools/datasets/imagenet_to_gcs.py)
-and [README](https://github.com/tensorflow/tpu/tree/master/tools/datasets#imagenet_to_gcspy)
-provide a few options.
-
-Once your dataset is ready, you can begin training the model as follows:
-
-```bash
-python imagenet_main.py --data_dir=/path/to/imagenet
-```
-
-The model will begin training and will automatically evaluate itself on the
-validation data roughly once per epoch.
-
-Note that there are a number of other options you can specify, including
-`--model_dir` to choose where to store the model and `--resnet_size` to choose
-the model size (options include ResNet-18 through ResNet-200). See
-[`resnet_run_loop.py`](resnet_run_loop.py) for the full list of options.
-
-
-## Compute Devices
-Training is accomplished using the DistributionStrategies API (https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/distribute/README.md).
-
-The appropriate distribution strategy is chosen based on the `--num_gpus` flag.
-By default this flag is one if TensorFlow is compiled with CUDA, and zero
-otherwise.
-
-num_gpus:
-+ 0: Use OneDeviceStrategy and train on CPU.
-+ 1: Use OneDeviceStrategy and train on GPU.
-+ 2+: Use MirroredStrategy (data parallelism) to distribute a batch between devices.
-
-### Pre-trained model
-You can download pre-trained versions of ResNet-50. Reported accuracies are top-1 single-crop accuracy for the ImageNet validation set.
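For reference, one of the SavedModels listed below can be loaded for prediction roughly as follows under a TF2 runtime. This is a minimal sketch: the `serving_default` signature name, the 224x224 tensor input, and the local path are assumptions for illustration and may differ per export.

```python
import numpy as np
import tensorflow as tf

# Hypothetical path: point this at the extracted directory that contains
# saved_model.pb (the downloaded archive may unpack into a numbered
# version subfolder).
export_dir = "/tmp/resnet_v2_fp32_savedmodel_NHWC"

loaded = tf.saved_model.load(export_dir)
infer = loaded.signatures["serving_default"]

# One dummy 224x224 RGB image; real inputs need the same preprocessing that
# is applied during evaluation.
images = tf.constant(np.zeros((1, 224, 224, 3), dtype=np.float32))
outputs = infer(images)
print({name: tensor.shape for name, tensor in outputs.items()})
```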
-Models are reported as both checkpoints produced by Estimator during training, and as SavedModels which are more portable. Checkpoints are fragile, -and these are not guaranteed to work with future versions of the code. Both ResNet v1 -and ResNet v2 have been trained in both fp16 and fp32 precision. (Here v1 refers to "v1.5". See the note above.) Furthermore, SavedModels -are generated to accept either tensor or JPG inputs, and with channels_first (NCHW) and channels_last (NHWC) convolutions. NCHW is generally -better for GPUs, while NHWC is generally better for CPUs. See the TensorFlow [performance guide](https://www.tensorflow.org/performance/performance_guide#data_formats) -for more details. - -ResNet-50 v2 (fp32, Accuracy 76.47%): -* [Checkpoint](http://download.tensorflow.org/models/official/20181001_resnet/checkpoints/resnet_imagenet_v2_fp32_20181001.tar.gz) -* SavedModel [(NCHW)](http://download.tensorflow.org/models/official/20181001_resnet/savedmodels/resnet_v2_fp32_savedmodel_NCHW.tar.gz), -[(NCHW, JPG)](http://download.tensorflow.org/models/official/20181001_resnet/savedmodels/resnet_v2_fp32_savedmodel_NCHW_jpg.tar.gz), -[(NHWC)](http://download.tensorflow.org/models/official/20181001_resnet/savedmodels/resnet_v2_fp32_savedmodel_NHWC.tar.gz), -[(NHWC, JPG)](http://download.tensorflow.org/models/official/20181001_resnet/savedmodels/resnet_v2_fp32_savedmodel_NHWC_jpg.tar.gz) - -ResNet-50 v2 (fp16, Accuracy 76.56%): -* [Checkpoint](http://download.tensorflow.org/models/official/20181001_resnet/checkpoints/resnet_imagenet_v2_fp16_20180928.tar.gz) -* SavedModel [(NCHW)](http://download.tensorflow.org/models/official/20181001_resnet/savedmodels/resnet_v2_fp16_savedmodel_NCHW.tar.gz), -[(NCHW, JPG)](http://download.tensorflow.org/models/official/20181001_resnet/savedmodels/resnet_v2_fp16_savedmodel_NCHW_jpg.tar.gz), -[(NHWC)](http://download.tensorflow.org/models/official/20181001_resnet/savedmodels/resnet_v2_fp16_savedmodel_NHWC.tar.gz), -[(NHWC, JPG)](http://download.tensorflow.org/models/official/20181001_resnet/savedmodels/resnet_v2_fp16_savedmodel_NHWC_jpg.tar.gz) - -ResNet-50 v1 (fp32, Accuracy 76.53%): -* [Checkpoint](http://download.tensorflow.org/models/official/20181001_resnet/checkpoints/resnet_imagenet_v1_fp32_20181001.tar.gz) -* SavedModel [(NCHW)](http://download.tensorflow.org/models/official/20181001_resnet/savedmodels/resnet_v1_fp32_savedmodel_NCHW.tar.gz), -[(NCHW, JPG)](http://download.tensorflow.org/models/official/20181001_resnet/savedmodels/resnet_v1_fp32_savedmodel_NCHW_jpg.tar.gz), -[(NHWC)](http://download.tensorflow.org/models/official/20181001_resnet/savedmodels/resnet_v1_fp32_savedmodel_NHWC.tar.gz), -[(NHWC, JPG)](http://download.tensorflow.org/models/official/20181001_resnet/savedmodels/resnet_v1_fp32_savedmodel_NHWC_jpg.tar.gz) - -ResNet-50 v1 (fp16, Accuracy 76.18%): -* [Checkpoint](http://download.tensorflow.org/models/official/20181001_resnet/checkpoints/resnet_imagenet_v1_fp16_20181001.tar.gz) -* SavedModel [(NCHW)](http://download.tensorflow.org/models/official/20181001_resnet/savedmodels/resnet_v1_fp16_savedmodel_NCHW.tar.gz), -[(NCHW, JPG)](http://download.tensorflow.org/models/official/20181001_resnet/savedmodels/resnet_v1_fp16_savedmodel_NCHW_jpg.tar.gz), -[(NHWC)](http://download.tensorflow.org/models/official/20181001_resnet/savedmodels/resnet_v1_fp16_savedmodel_NHWC.tar.gz), -[(NHWC, JPG)](http://download.tensorflow.org/models/official/20181001_resnet/savedmodels/resnet_v1_fp16_savedmodel_NHWC_jpg.tar.gz) - -### Transfer Learning -You can use 
a pretrained model to initialize a training process. In addition, you can freeze all but the final fully connected layers to fine-tune your model. Transfer learning is useful when training on your own small datasets. For a brief look at transfer learning in the context of convolutional neural networks, we recommend reading these [short notes](http://cs231n.github.io/transfer-learning/).
-
-
-To fine-tune a pretrained ResNet you must make three changes to your training procedure:
-
-1) Build the exact same model as before, except change the number of labels in the final classification layer.
-
-2) Restore all weights from the pre-trained ResNet except for the final classification layer; this layer will be randomly initialized instead.
-
-3) Freeze the earlier layers of the network.
-
-We can perform these three operations by specifying two flags: ```--pretrained_model_checkpoint_path``` and ```--fine_tune```. The first flag is a string that points to the path of a pre-trained ResNet model. If this flag is specified, it will load all but the final classification layer. A key thing to note: if both ```--pretrained_model_checkpoint_path``` and a non-empty ```model_dir``` directory are passed, the TensorFlow Estimator will load only the ```model_dir```. For more on this, please see [WarmStartSettings](https://www.tensorflow.org/versions/master/api_docs/python/tf/estimator/WarmStartSettings) and [Estimators](https://www.tensorflow.org/guide/estimators).
-
-The second flag ```--fine_tune``` is a boolean that indicates whether earlier layers of the network should be frozen. Set this flag to false if you wish to continue training a pre-trained model from a checkpoint; set it to true to train a new classification layer from scratch while the earlier layers stay frozen.
diff --git a/official/r1/resnet/__init__.py b/official/r1/resnet/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/official/r1/resnet/cifar10_download_and_extract.py b/official/r1/resnet/cifar10_download_and_extract.py
deleted file mode 100644
index a44d042e1..000000000
--- a/official/r1/resnet/cifar10_download_and_extract.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============================================================================== - -"""Downloads and extracts the binary version of the CIFAR-10 dataset.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import argparse -import os -import sys -import tarfile - -from six.moves import urllib -import tensorflow as tf - -DATA_URL = 'https://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz' - -parser = argparse.ArgumentParser() - -parser.add_argument( - '--data_dir', type=str, default='/tmp/cifar10_data', - help='Directory to download data and extract the tarball') - - -def main(_): - """Download and extract the tarball from Alex's website.""" - if not os.path.exists(FLAGS.data_dir): - os.makedirs(FLAGS.data_dir) - - filename = DATA_URL.split('/')[-1] - filepath = os.path.join(FLAGS.data_dir, filename) - - if not os.path.exists(filepath): - def _progress(count, block_size, total_size): - sys.stdout.write('\r>> Downloading %s %.1f%%' % ( - filename, 100.0 * count * block_size / total_size)) - sys.stdout.flush() - - filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath, _progress) - print() - statinfo = os.stat(filepath) - print('Successfully downloaded', filename, statinfo.st_size, 'bytes.') - - tarfile.open(filepath, 'r:gz').extractall(FLAGS.data_dir) - - -if __name__ == '__main__': - FLAGS, unparsed = parser.parse_known_args() - tf.compat.v1.app.run(argv=[sys.argv[0]] + unparsed) diff --git a/official/r1/resnet/cifar10_main.py b/official/r1/resnet/cifar10_main.py deleted file mode 100644 index e0983d82f..000000000 --- a/official/r1/resnet/cifar10_main.py +++ /dev/null @@ -1,297 +0,0 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Runs a ResNet model on the CIFAR-10 dataset.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import os - -from absl import app as absl_app -from absl import flags -from absl import logging -from six.moves import range -import tensorflow as tf - -from official.r1.resnet import resnet_model -from official.r1.resnet import resnet_run_loop -from official.r1.utils.logs import logger -from official.utils.flags import core as flags_core - -HEIGHT = 32 -WIDTH = 32 -NUM_CHANNELS = 3 -_DEFAULT_IMAGE_BYTES = HEIGHT * WIDTH * NUM_CHANNELS -# The record is the image plus a one-byte label -_RECORD_BYTES = _DEFAULT_IMAGE_BYTES + 1 -NUM_CLASSES = 10 -_NUM_DATA_FILES = 5 - -# TODO(tobyboyd): Change to best practice 45K(train)/5K(val)/10K(test) splits. 
-NUM_IMAGES = { - 'train': 50000, - 'validation': 10000, -} - -DATASET_NAME = 'CIFAR-10' - - -############################################################################### -# Data processing -############################################################################### -def get_filenames(is_training, data_dir): - """Returns a list of filenames.""" - assert tf.io.gfile.exists(data_dir), ( - 'Run cifar10_download_and_extract.py first to download and extract the ' - 'CIFAR-10 data.') - - if is_training: - return [ - os.path.join(data_dir, 'data_batch_%d.bin' % i) - for i in range(1, _NUM_DATA_FILES + 1) - ] - else: - return [os.path.join(data_dir, 'test_batch.bin')] - - -def parse_record(raw_record, is_training, dtype): - """Parse CIFAR-10 image and label from a raw record.""" - # Convert bytes to a vector of uint8 that is record_bytes long. - record_vector = tf.io.decode_raw(raw_record, tf.uint8) - - # The first byte represents the label, which we convert from uint8 to int32 - # and then to one-hot. - label = tf.cast(record_vector[0], tf.int32) - - # The remaining bytes after the label represent the image, which we reshape - # from [depth * height * width] to [depth, height, width]. - depth_major = tf.reshape(record_vector[1:_RECORD_BYTES], - [NUM_CHANNELS, HEIGHT, WIDTH]) - - # Convert from [depth, height, width] to [height, width, depth], and cast as - # float32. - image = tf.cast(tf.transpose(a=depth_major, perm=[1, 2, 0]), tf.float32) - - image = preprocess_image(image, is_training) - image = tf.cast(image, dtype) - - return image, label - - -def preprocess_image(image, is_training): - """Preprocess a single image of layout [height, width, depth].""" - if is_training: - # Resize the image to add four extra pixels on each side. - image = tf.image.resize_with_crop_or_pad( - image, HEIGHT + 8, WIDTH + 8) - - # Randomly crop a [HEIGHT, WIDTH] section of the image. - image = tf.image.random_crop(image, [HEIGHT, WIDTH, NUM_CHANNELS]) - - # Randomly flip the image horizontally. - image = tf.image.random_flip_left_right(image) - - # Subtract off the mean and divide by the variance of the pixels. - image = tf.image.per_image_standardization(image) - return image - - -def input_fn(is_training, - data_dir, - batch_size, - num_epochs=1, - dtype=tf.float32, - datasets_num_private_threads=None, - parse_record_fn=parse_record, - input_context=None, - drop_remainder=False): - """Input function which provides batches for train or eval. - - Args: - is_training: A boolean denoting whether the input is for training. - data_dir: The directory containing the input data. - batch_size: The number of samples per batch. - num_epochs: The number of epochs to repeat the dataset. - dtype: Data type to use for images/features - datasets_num_private_threads: Number of private threads for tf.data. - parse_record_fn: Function to use for parsing the records. - input_context: A `tf.distribute.InputContext` object passed in by - `tf.distribute.Strategy`. - drop_remainder: A boolean indicates whether to drop the remainder of the - batches. If True, the batch dimension will be static. - - Returns: - A dataset that can be used for iteration. 
- """ - filenames = get_filenames(is_training, data_dir) - dataset = tf.data.FixedLengthRecordDataset(filenames, _RECORD_BYTES) - - if input_context: - logging.info( - 'Sharding the dataset: input_pipeline_id=%d num_input_pipelines=%d', - input_context.input_pipeline_id, input_context.num_input_pipelines) - dataset = dataset.shard(input_context.num_input_pipelines, - input_context.input_pipeline_id) - - return resnet_run_loop.process_record_dataset( - dataset=dataset, - is_training=is_training, - batch_size=batch_size, - shuffle_buffer=NUM_IMAGES['train'], - parse_record_fn=parse_record_fn, - num_epochs=num_epochs, - dtype=dtype, - datasets_num_private_threads=datasets_num_private_threads, - drop_remainder=drop_remainder - ) - - -def get_synth_input_fn(dtype): - return resnet_run_loop.get_synth_input_fn( - HEIGHT, WIDTH, NUM_CHANNELS, NUM_CLASSES, dtype=dtype) - - -############################################################################### -# Running the model -############################################################################### -class Cifar10Model(resnet_model.Model): - """Model class with appropriate defaults for CIFAR-10 data.""" - - def __init__(self, resnet_size, data_format=None, num_classes=NUM_CLASSES, - resnet_version=resnet_model.DEFAULT_VERSION, - dtype=resnet_model.DEFAULT_DTYPE): - """These are the parameters that work for CIFAR-10 data. - - Args: - resnet_size: The number of convolutional layers needed in the model. - data_format: Either 'channels_first' or 'channels_last', specifying which - data format to use when setting up the model. - num_classes: The number of output classes needed from the model. This - enables users to extend the same model to their own datasets. - resnet_version: Integer representing which version of the ResNet network - to use. See README for details. Valid values: [1, 2] - dtype: The TensorFlow dtype to use for calculations. - - Raises: - ValueError: if invalid resnet_size is chosen - """ - if resnet_size % 6 != 2: - raise ValueError('resnet_size must be 6n + 2:', resnet_size) - - num_blocks = (resnet_size - 2) // 6 - - super(Cifar10Model, self).__init__( - resnet_size=resnet_size, - bottleneck=False, - num_classes=num_classes, - num_filters=16, - kernel_size=3, - conv_stride=1, - first_pool_size=None, - first_pool_stride=None, - block_sizes=[num_blocks] * 3, - block_strides=[1, 2, 2], - resnet_version=resnet_version, - data_format=data_format, - dtype=dtype - ) - - -def cifar10_model_fn(features, labels, mode, params): - """Model function for CIFAR-10.""" - features = tf.reshape(features, [-1, HEIGHT, WIDTH, NUM_CHANNELS]) - # Learning rate schedule follows arXiv:1512.03385 for ResNet-56 and under. - learning_rate_fn = resnet_run_loop.learning_rate_with_decay( - batch_size=params['batch_size'] * params.get('num_workers', 1), - batch_denom=128, num_images=NUM_IMAGES['train'], - boundary_epochs=[91, 136, 182], decay_rates=[1, 0.1, 0.01, 0.001]) - - # Weight decay of 2e-4 diverges from 1e-4 decay used in the ResNet paper - # and seems more stable in testing. The difference was nominal for ResNet-56. - weight_decay = 2e-4 - - # Empirical testing showed that including batch_normalization variables - # in the calculation of regularized loss helped validation accuracy - # for the CIFAR-10 dataset, perhaps because the regularization prevents - # overfitting on the small data set. We therefore include all vars when - # regularizing and computing loss during training. 
- def loss_filter_fn(_): - return True - - return resnet_run_loop.resnet_model_fn( - features=features, - labels=labels, - mode=mode, - model_class=Cifar10Model, - resnet_size=params['resnet_size'], - weight_decay=weight_decay, - learning_rate_fn=learning_rate_fn, - momentum=0.9, - data_format=params['data_format'], - resnet_version=params['resnet_version'], - loss_scale=params['loss_scale'], - loss_filter_fn=loss_filter_fn, - dtype=params['dtype'], - fine_tune=params['fine_tune'] - ) - - -def define_cifar_flags(): - resnet_run_loop.define_resnet_flags() - flags.adopt_module_key_flags(resnet_run_loop) - flags_core.set_defaults(data_dir='/tmp/cifar10_data/cifar-10-batches-bin', - model_dir='/tmp/cifar10_model', - resnet_size='56', - train_epochs=182, - epochs_between_evals=10, - batch_size=128, - image_bytes_as_serving_input=False) - - -def run_cifar(flags_obj): - """Run ResNet CIFAR-10 training and eval loop. - - Args: - flags_obj: An object containing parsed flag values. - - Returns: - Dictionary of results. Including final accuracy. - """ - if flags_obj.image_bytes_as_serving_input: - logging.fatal( - '--image_bytes_as_serving_input cannot be set to True for CIFAR. ' - 'This flag is only applicable to ImageNet.') - return - - input_function = (flags_obj.use_synthetic_data and - get_synth_input_fn(flags_core.get_tf_dtype(flags_obj)) or - input_fn) - result = resnet_run_loop.resnet_main( - flags_obj, cifar10_model_fn, input_function, DATASET_NAME, - shape=[HEIGHT, WIDTH, NUM_CHANNELS]) - - return result - - -def main(_): - with logger.benchmark_context(flags.FLAGS): - run_cifar(flags.FLAGS) - - -if __name__ == '__main__': - logging.set_verbosity(logging.INFO) - define_cifar_flags() - absl_app.run(main) diff --git a/official/r1/resnet/cifar10_test.py b/official/r1/resnet/cifar10_test.py deleted file mode 100644 index ba40eb2c6..000000000 --- a/official/r1/resnet/cifar10_test.py +++ /dev/null @@ -1,183 +0,0 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from tempfile import mkstemp - -from absl import logging -import numpy as np -import tensorflow as tf - -from official.r1.resnet import cifar10_main -from official.utils.testing import integration - -logging.set_verbosity(logging.ERROR) - -_BATCH_SIZE = 128 -_HEIGHT = 32 -_WIDTH = 32 -_NUM_CHANNELS = 3 - - -class BaseTest(tf.test.TestCase): - """Tests for the Cifar10 version of Resnet. 
- """ - - _num_validation_images = None - - @classmethod - def setUpClass(cls): # pylint: disable=invalid-name - super(BaseTest, cls).setUpClass() - tf.compat.v1.disable_eager_execution() - cifar10_main.define_cifar_flags() - - def setUp(self): - super(BaseTest, self).setUp() - self._num_validation_images = cifar10_main.NUM_IMAGES['validation'] - cifar10_main.NUM_IMAGES['validation'] = 4 - - def tearDown(self): - super(BaseTest, self).tearDown() - tf.io.gfile.rmtree(self.get_temp_dir()) - cifar10_main.NUM_IMAGES['validation'] = self._num_validation_images - - def test_dataset_input_fn(self): - fake_data = bytearray() - fake_data.append(7) - for i in range(_NUM_CHANNELS): - for _ in range(_HEIGHT * _WIDTH): - fake_data.append(i) - - _, filename = mkstemp(dir=self.get_temp_dir()) - data_file = open(filename, 'wb') - data_file.write(fake_data) - data_file.close() - - fake_dataset = tf.data.FixedLengthRecordDataset( - filename, cifar10_main._RECORD_BYTES) # pylint: disable=protected-access - fake_dataset = fake_dataset.map( - lambda val: cifar10_main.parse_record(val, False, tf.float32)) - image, label = tf.compat.v1.data.make_one_shot_iterator( - fake_dataset).get_next() - - self.assertAllEqual(label.shape, ()) - self.assertAllEqual(image.shape, (_HEIGHT, _WIDTH, _NUM_CHANNELS)) - - with self.session() as sess: - image, label = sess.run([image, label]) - - self.assertEqual(label, 7) - - for row in image: - for pixel in row: - self.assertAllClose(pixel, np.array([-1.225, 0., 1.225]), rtol=1e-3) - - def cifar10_model_fn_helper(self, mode, resnet_version, dtype): - input_fn = cifar10_main.get_synth_input_fn(dtype) - dataset = input_fn(True, '', _BATCH_SIZE) - iterator = tf.compat.v1.data.make_initializable_iterator(dataset) - features, labels = iterator.get_next() - spec = cifar10_main.cifar10_model_fn( - features, labels, mode, { - 'dtype': dtype, - 'resnet_size': 32, - 'data_format': 'channels_last', - 'batch_size': _BATCH_SIZE, - 'resnet_version': resnet_version, - 'loss_scale': 128 if dtype == tf.float16 else 1, - 'fine_tune': False, - }) - - predictions = spec.predictions - self.assertAllEqual(predictions['probabilities'].shape, - (_BATCH_SIZE, 10)) - self.assertEqual(predictions['probabilities'].dtype, tf.float32) - self.assertAllEqual(predictions['classes'].shape, (_BATCH_SIZE,)) - self.assertEqual(predictions['classes'].dtype, tf.int64) - - if mode != tf.estimator.ModeKeys.PREDICT: - loss = spec.loss - self.assertAllEqual(loss.shape, ()) - self.assertEqual(loss.dtype, tf.float32) - - if mode == tf.estimator.ModeKeys.EVAL: - eval_metric_ops = spec.eval_metric_ops - self.assertAllEqual(eval_metric_ops['accuracy'][0].shape, ()) - self.assertAllEqual(eval_metric_ops['accuracy'][1].shape, ()) - self.assertEqual(eval_metric_ops['accuracy'][0].dtype, tf.float32) - self.assertEqual(eval_metric_ops['accuracy'][1].dtype, tf.float32) - - def test_cifar10_model_fn_train_mode_v1(self): - self.cifar10_model_fn_helper(tf.estimator.ModeKeys.TRAIN, resnet_version=1, - dtype=tf.float32) - - def test_cifar10_model_fn_trainmode__v2(self): - self.cifar10_model_fn_helper(tf.estimator.ModeKeys.TRAIN, resnet_version=2, - dtype=tf.float32) - - def test_cifar10_model_fn_eval_mode_v1(self): - self.cifar10_model_fn_helper(tf.estimator.ModeKeys.EVAL, resnet_version=1, - dtype=tf.float32) - - def test_cifar10_model_fn_eval_mode_v2(self): - self.cifar10_model_fn_helper(tf.estimator.ModeKeys.EVAL, resnet_version=2, - dtype=tf.float32) - - def test_cifar10_model_fn_predict_mode_v1(self): - 
self.cifar10_model_fn_helper(tf.estimator.ModeKeys.PREDICT, - resnet_version=1, dtype=tf.float32) - - def test_cifar10_model_fn_predict_mode_v2(self): - self.cifar10_model_fn_helper(tf.estimator.ModeKeys.PREDICT, - resnet_version=2, dtype=tf.float32) - - def _test_cifar10model_shape(self, resnet_version): - batch_size = 135 - num_classes = 246 - - model = cifar10_main.Cifar10Model(32, data_format='channels_last', - num_classes=num_classes, - resnet_version=resnet_version) - fake_input = tf.random.uniform([batch_size, _HEIGHT, _WIDTH, _NUM_CHANNELS]) - output = model(fake_input, training=True) - - self.assertAllEqual(output.shape, (batch_size, num_classes)) - - def test_cifar10model_shape_v1(self): - self._test_cifar10model_shape(resnet_version=1) - - def test_cifar10model_shape_v2(self): - self._test_cifar10model_shape(resnet_version=2) - - def test_cifar10_end_to_end_synthetic_v1(self): - integration.run_synthetic( - main=cifar10_main.run_cifar, tmp_root=self.get_temp_dir(), - extra_flags=['-resnet_version', '1', '-batch_size', '4', - '--max_train_steps', '1'] - ) - - def test_cifar10_end_to_end_synthetic_v2(self): - integration.run_synthetic( - main=cifar10_main.run_cifar, tmp_root=self.get_temp_dir(), - extra_flags=['-resnet_version', '2', '-batch_size', '4', - '--max_train_steps', '1'] - ) - - -if __name__ == '__main__': - tf.test.main() diff --git a/official/r1/resnet/estimator_benchmark.py b/official/r1/resnet/estimator_benchmark.py deleted file mode 100644 index a1b9f79ff..000000000 --- a/official/r1/resnet/estimator_benchmark.py +++ /dev/null @@ -1,500 +0,0 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Executes Estimator benchmarks and accuracy tests.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import os -import time - -from absl import flags -from absl import logging -from absl.testing import flagsaver -import tensorflow as tf - -from official.r1.resnet import cifar10_main as cifar_main -from official.r1.resnet import imagenet_main -from official.r1.utils.logs import hooks -from official.utils.flags import core as flags_core - -IMAGENET_DATA_DIR_NAME = 'imagenet' -CIFAR_DATA_DIR_NAME = 'cifar-10-batches-bin' -FLAGS = flags.FLAGS - - -class EstimatorBenchmark(tf.test.Benchmark): - """Base class to hold methods common to test classes in the module. - - Code under test for Estimator models (ResNet50 and 56) report mostly the - same data and require the same FLAG setup. - """ - - local_flags = None - - def __init__(self, output_dir=None, default_flags=None, flag_methods=None): - if not output_dir: - output_dir = '/tmp' - self.output_dir = output_dir - self.default_flags = default_flags or {} - self.flag_methods = flag_methods or {} - - def _get_model_dir(self, folder_name): - """Returns directory to store info, e.g. 
saved model and event log.""" - return os.path.join(self.output_dir, folder_name) - - def _setup(self): - """Sets up and resets flags before each test.""" - logging.set_verbosity(logging.INFO) - if EstimatorBenchmark.local_flags is None: - for flag_method in self.flag_methods: - flag_method() - # Loads flags to get defaults to then override. List cannot be empty. - flags.FLAGS(['foo']) - # Overrides flag values with defaults for the class of tests. - for k, v in self.default_flags.items(): - setattr(FLAGS, k, v) - saved_flag_values = flagsaver.save_flag_values() - EstimatorBenchmark.local_flags = saved_flag_values - else: - flagsaver.restore_flag_values(EstimatorBenchmark.local_flags) - - def _report_benchmark(self, - stats, - wall_time_sec, - top_1_max=None, - top_1_min=None): - """Report benchmark results by writing to local protobuf file. - - Args: - stats: dict returned from estimator models with known entries. - wall_time_sec: the during of the benchmark execution in seconds - top_1_max: highest passing level for top_1 accuracy. - top_1_min: lowest passing level for top_1 accuracy. - """ - - examples_per_sec_hook = None - for hook in stats['train_hooks']: - if isinstance(hook, hooks.ExamplesPerSecondHook): - examples_per_sec_hook = hook - break - - eval_results = stats['eval_results'] - metrics = [] - if 'accuracy' in eval_results: - metrics.append({'name': 'accuracy_top_1', - 'value': float(eval_results['accuracy']), - 'min_value': top_1_min, - 'max_value': top_1_max}) - if 'accuracy_top_5' in eval_results: - metrics.append({'name': 'accuracy_top_5', - 'value': float(eval_results['accuracy_top_5'])}) - - if examples_per_sec_hook: - exp_per_second_list = examples_per_sec_hook.current_examples_per_sec_list - # ExamplesPerSecondHook skips the first 10 steps. - exp_per_sec = sum(exp_per_second_list) / (len(exp_per_second_list)) - metrics.append({'name': 'exp_per_second', - 'value': exp_per_sec}) - flags_str = flags_core.get_nondefault_flags_as_str() - self.report_benchmark( - iters=eval_results.get('global_step', None), - wall_time=wall_time_sec, - metrics=metrics, - extras={'flags': flags_str}) - - -class Resnet50EstimatorAccuracy(EstimatorBenchmark): - """Benchmark accuracy tests for ResNet50 w/ Estimator.""" - - def __init__(self, output_dir=None, root_data_dir=None, **kwargs): - """Benchmark accuracy tests for ResNet50 w/ Estimator. - - Args: - output_dir: directory where to output e.g. log files - root_data_dir: directory under which to look for dataset - **kwargs: arbitrary named arguments. This is needed to make the - constructor forward compatible in case PerfZero provides more - named arguments before updating the constructor. 
- """ - flag_methods = [imagenet_main.define_imagenet_flags] - - self.data_dir = os.path.join(root_data_dir, IMAGENET_DATA_DIR_NAME) - super(Resnet50EstimatorAccuracy, self).__init__( - output_dir=output_dir, flag_methods=flag_methods) - - def benchmark_graph_8_gpu(self): - """Test 8 GPUs graph mode.""" - self._setup() - FLAGS.num_gpus = 8 - FLAGS.data_dir = self.data_dir - FLAGS.batch_size = 128 * 8 - FLAGS.train_epochs = 90 - FLAGS.epochs_between_evals = 10 - FLAGS.model_dir = self._get_model_dir('benchmark_graph_8_gpu') - FLAGS.dtype = 'fp32' - FLAGS.hooks = ['ExamplesPerSecondHook'] - self._run_and_report_benchmark() - - def benchmark_graph_fp16_8_gpu(self): - """Test FP16 8 GPUs graph mode.""" - self._setup() - FLAGS.num_gpus = 8 - FLAGS.data_dir = self.data_dir - FLAGS.batch_size = 256 * 8 - FLAGS.train_epochs = 90 - FLAGS.epochs_between_evals = 10 - FLAGS.model_dir = self._get_model_dir('benchmark_graph_fp16_8_gpu') - FLAGS.dtype = 'fp16' - FLAGS.hooks = ['ExamplesPerSecondHook'] - self._run_and_report_benchmark() - - def benchmark_graph_fp16_graph_rewrite_8_gpu(self): - """Test FP16 graph rewrite 8 GPUs graph mode.""" - self._setup() - FLAGS.num_gpus = 8 - FLAGS.data_dir = self.data_dir - FLAGS.batch_size = 256 * 8 - FLAGS.train_epochs = 90 - FLAGS.epochs_between_evals = 10 - FLAGS.model_dir = self._get_model_dir( - 'benchmark_graph_fp16_graph_rewrite_8_gpu') - FLAGS.dtype = 'fp16' - FLAGS.fp16_implementation = 'graph_rewrite' - FLAGS.hooks = ['ExamplesPerSecondHook'] - self._run_and_report_benchmark() - - def _run_and_report_benchmark(self): - start_time_sec = time.time() - stats = imagenet_main.run_imagenet(flags.FLAGS) - wall_time_sec = time.time() - start_time_sec - self._report_benchmark(stats, - wall_time_sec, - top_1_min=0.762, - top_1_max=0.766) - - -class Resnet50EstimatorBenchmarkBase(EstimatorBenchmark): - """Base class for benchmarks for ResNet50 using Estimator.""" - local_flags = None - - def __init__(self, output_dir=None, default_flags=None): - flag_methods = [imagenet_main.define_imagenet_flags] - - super(Resnet50EstimatorBenchmarkBase, self).__init__( - output_dir=output_dir, - default_flags=default_flags, - flag_methods=flag_methods) - - def _run_and_report_benchmark(self): - start_time_sec = time.time() - stats = imagenet_main.run_imagenet(FLAGS) - wall_time_sec = time.time() - start_time_sec - print(stats) - # Remove values to skip triggering accuracy check. 
- stats['eval_results'].pop('accuracy', None) - stats['eval_results'].pop('accuracy_top_5', None) - - self._report_benchmark(stats, wall_time_sec) - - -class Resnet50EstimatorBenchmark(Resnet50EstimatorBenchmarkBase): - """Benchmarks for ResNet50 using Estimator with 1 worker.""" - - def __init__(self, output_dir=None, default_flags=None): - super(Resnet50EstimatorBenchmark, self).__init__( - output_dir=output_dir, - default_flags=default_flags) - - def benchmark_graph_fp16_1_gpu(self): - """Benchmarks graph fp16 1 gpu.""" - self._setup() - - FLAGS.num_gpus = 1 - FLAGS.model_dir = self._get_model_dir('benchmark_graph_fp16_1_gpu') - FLAGS.batch_size = 128 - FLAGS.dtype = 'fp16' - FLAGS.hooks = ['ExamplesPerSecondHook'] - self._run_and_report_benchmark() - - def benchmark_graph_fp16_1_gpu_tweaked(self): - """Benchmarks graph fp16 1 gpu tweaked.""" - self._setup() - - FLAGS.num_gpus = 1 - FLAGS.tf_gpu_thread_mode = 'gpu_private' - FLAGS.intra_op_parallelism_threads = 1 - FLAGS.model_dir = self._get_model_dir('benchmark_graph_fp16_1_gpu_tweaked') - FLAGS.batch_size = 256 - FLAGS.dtype = 'fp16' - FLAGS.hooks = ['ExamplesPerSecondHook'] - self._run_and_report_benchmark() - - def benchmark_graph_fp16_graph_rewrite_1_gpu_tweaked(self): - """Benchmarks graph fp16 graph rewrite 1 gpu tweaked.""" - self._setup() - - FLAGS.num_gpus = 1 - FLAGS.tf_gpu_thread_mode = 'gpu_private' - FLAGS.intra_op_parallelism_threads = 1 - FLAGS.model_dir = self._get_model_dir( - 'benchmark_graph_fp16_graph_rewrite_1_gpu_tweaked') - FLAGS.batch_size = 256 - FLAGS.dtype = 'fp16' - FLAGS.fp16_implementation = 'graph_rewrite' - FLAGS.hooks = ['ExamplesPerSecondHook'] - self._run_and_report_benchmark() - - def benchmark_graph_1_gpu(self): - """Benchmarks graph 1 gpu.""" - self._setup() - - FLAGS.num_gpus = 1 - FLAGS.model_dir = self._get_model_dir('benchmark_graph_1_gpu') - FLAGS.batch_size = 128 - FLAGS.dtype = 'fp32' - FLAGS.hooks = ['ExamplesPerSecondHook'] - self._run_and_report_benchmark() - - def benchmark_graph_8_gpu(self): - """Benchmarks graph 8 gpus.""" - self._setup() - - FLAGS.num_gpus = 8 - FLAGS.model_dir = self._get_model_dir('benchmark_graph_8_gpu') - FLAGS.batch_size = 128*8 - FLAGS.dtype = 'fp32' - FLAGS.hooks = ['ExamplesPerSecondHook'] - self._run_and_report_benchmark() - - def benchmark_graph_fp16_8_gpu(self): - """Benchmarks graph fp16 8 gpus.""" - self._setup() - - FLAGS.num_gpus = 8 - FLAGS.model_dir = self._get_model_dir('benchmark_graph_fp16_8_gpu') - FLAGS.batch_size = 256*8 - FLAGS.dtype = 'fp16' - FLAGS.hooks = ['ExamplesPerSecondHook'] - self._run_and_report_benchmark() - - def benchmark_graph_fp16_8_gpu_tweaked(self): - """Benchmarks graph fp16 8 gpus tweaked.""" - self._setup() - - FLAGS.num_gpus = 8 - FLAGS.tf_gpu_thread_mode = 'gpu_private' - FLAGS.intra_op_parallelism_threads = 1 - FLAGS.model_dir = self._get_model_dir('benchmark_graph_fp16_8_gpu_tweaked') - FLAGS.batch_size = 256*8 - FLAGS.dtype = 'fp16' - FLAGS.hooks = ['ExamplesPerSecondHook'] - self._run_and_report_benchmark() - - def benchmark_graph_fp16_graph_rewrite_8_gpu_tweaked(self): - """Benchmarks graph fp16 graph rewrite 8 gpus tweaked.""" - self._setup() - - FLAGS.num_gpus = 8 - FLAGS.tf_gpu_thread_mode = 'gpu_private' - FLAGS.intra_op_parallelism_threads = 1 - FLAGS.model_dir = self._get_model_dir( - 'benchmark_graph_fp16_graph_rewrite_8_gpu_tweaked') - FLAGS.batch_size = 256*8 - FLAGS.dtype = 'fp16' - FLAGS.fp16_implementation = 'graph_rewrite' - FLAGS.hooks = ['ExamplesPerSecondHook'] - self._run_and_report_benchmark() - - 
-class Resnet50EstimatorBenchmarkSynth(Resnet50EstimatorBenchmark): - """Resnet50 synthetic benchmark tests.""" - - def __init__(self, output_dir=None, root_data_dir=None, **kwargs): - def_flags = {} - def_flags['use_synthetic_data'] = True - def_flags['max_train_steps'] = 110 - def_flags['train_epochs'] = 1 - - super(Resnet50EstimatorBenchmarkSynth, self).__init__( - output_dir=output_dir, default_flags=def_flags) - - -class Resnet50EstimatorBenchmarkReal(Resnet50EstimatorBenchmark): - """Resnet50 real data benchmark tests.""" - - def __init__(self, output_dir=None, root_data_dir=None, **kwargs): - def_flags = {} - def_flags['data_dir'] = os.path.join(root_data_dir, IMAGENET_DATA_DIR_NAME) - def_flags['max_train_steps'] = 110 - def_flags['train_epochs'] = 1 - - super(Resnet50EstimatorBenchmarkReal, self).__init__( - output_dir=output_dir, default_flags=def_flags) - - -class Resnet50MultiWorkerEstimatorBenchmark(Resnet50EstimatorBenchmarkBase): - """Benchmarks for ResNet50 using Estimator with multiple workers.""" - - def __init__(self, output_dir=None, default_flags=None): - super(Resnet50MultiWorkerEstimatorBenchmark, self).__init__( - output_dir=output_dir, - default_flags=default_flags) - - def benchmark_graph_fp16_8_gpu_ring_tweaked(self): - """Benchmarks graph fp16 8 gpus with ring collective tweaked.""" - self._setup() - - FLAGS.num_gpus = 8 - FLAGS.distribution_strategy = 'multi_worker_mirrored' - FLAGS.all_reduce_alg = 'ring' - FLAGS.tf_gpu_thread_mode = 'gpu_private' - FLAGS.intra_op_parallelism_threads = 1 - FLAGS.datasets_num_private_threads = 32 - FLAGS.model_dir = self._get_model_dir( - folder_name='benchmark_graph_fp16_8_gpu_ring_tweaked') - FLAGS.batch_size = 256*8 - FLAGS.dtype = 'fp16' - FLAGS.hooks = ['ExamplesPerSecondHook'] - self._run_and_report_benchmark() - - def benchmark_graph_fp16_8_gpu_nccl_tweaked(self): - """Benchmarks graph fp16 8 gpus with nccl collective tweaked.""" - self._setup() - - FLAGS.num_gpus = 8 - FLAGS.distribution_strategy = 'multi_worker_mirrored' - FLAGS.all_reduce_alg = 'nccl' - FLAGS.tf_gpu_thread_mode = 'gpu_private' - FLAGS.intra_op_parallelism_threads = 1 - FLAGS.datasets_num_private_threads = 32 - FLAGS.model_dir = self._get_model_dir( - folder_name='benchmark_graph_fp16_8_gpu_nccl_tweaked') - FLAGS.batch_size = 256*8 - FLAGS.dtype = 'fp16' - FLAGS.hooks = ['ExamplesPerSecondHook'] - self._run_and_report_benchmark() - - -class Resnet50MultiWorkerEstimatorBenchmarkSynth( - Resnet50MultiWorkerEstimatorBenchmark): - """ResNet50, multi-worker, Estimator, synthetic data.""" - - def __init__(self, output_dir=None, root_data_dir=None, **kwargs): - def_flags = {} - def_flags['use_synthetic_data'] = True - def_flags['max_train_steps'] = 110 - def_flags['train_epochs'] = 1 - - super(Resnet50MultiWorkerEstimatorBenchmarkSynth, self).__init__( - output_dir=output_dir, default_flags=def_flags) - - -class Resnet56EstimatorAccuracy(EstimatorBenchmark): - """Accuracy tests for Estimator ResNet56.""" - - local_flags = None - - def __init__(self, output_dir=None, root_data_dir=None, **kwargs): - """A benchmark class. - - Args: - output_dir: directory where to output e.g. log files - root_data_dir: directory under which to look for dataset - **kwargs: arbitrary named arguments. This is needed to make the - constructor forward compatible in case PerfZero provides more - named arguments before updating the constructor. 
- """ - flag_methods = [cifar_main.define_cifar_flags] - - self.data_dir = os.path.join(root_data_dir, CIFAR_DATA_DIR_NAME) - super(Resnet56EstimatorAccuracy, self).__init__( - output_dir=output_dir, flag_methods=flag_methods) - - def benchmark_graph_1_gpu(self): - """Test layers model with Estimator and distribution strategies.""" - self._setup() - flags.FLAGS.num_gpus = 1 - flags.FLAGS.data_dir = self.data_dir - flags.FLAGS.batch_size = 128 - flags.FLAGS.train_epochs = 182 - flags.FLAGS.model_dir = self._get_model_dir('benchmark_graph_1_gpu') - flags.FLAGS.resnet_size = 56 - flags.FLAGS.dtype = 'fp32' - flags.FLAGS.hooks = ['ExamplesPerSecondHook'] - self._run_and_report_benchmark() - - def benchmark_graph_fp16_1_gpu(self): - """Test layers FP16 model with Estimator and distribution strategies.""" - self._setup() - flags.FLAGS.num_gpus = 1 - flags.FLAGS.data_dir = self.data_dir - flags.FLAGS.batch_size = 128 - flags.FLAGS.train_epochs = 182 - flags.FLAGS.model_dir = self._get_model_dir('benchmark_graph_fp16_1_gpu') - flags.FLAGS.resnet_size = 56 - flags.FLAGS.dtype = 'fp16' - flags.FLAGS.hooks = ['ExamplesPerSecondHook'] - self._run_and_report_benchmark() - - def benchmark_graph_2_gpu(self): - """Test layers model with Estimator and dist_strat. 2 GPUs.""" - self._setup() - flags.FLAGS.num_gpus = 2 - flags.FLAGS.data_dir = self.data_dir - flags.FLAGS.batch_size = 128 - flags.FLAGS.train_epochs = 182 - flags.FLAGS.model_dir = self._get_model_dir('benchmark_graph_2_gpu') - flags.FLAGS.resnet_size = 56 - flags.FLAGS.dtype = 'fp32' - flags.FLAGS.hooks = ['ExamplesPerSecondHook'] - self._run_and_report_benchmark() - - def benchmark_graph_fp16_2_gpu(self): - """Test layers FP16 model with Estimator and dist_strat. 2 GPUs.""" - self._setup() - flags.FLAGS.num_gpus = 2 - flags.FLAGS.data_dir = self.data_dir - flags.FLAGS.batch_size = 128 - flags.FLAGS.train_epochs = 182 - flags.FLAGS.model_dir = self._get_model_dir('benchmark_graph_fp16_2_gpu') - flags.FLAGS.resnet_size = 56 - flags.FLAGS.dtype = 'fp16' - flags.FLAGS.hooks = ['ExamplesPerSecondHook'] - self._run_and_report_benchmark() - - def unit_test(self): - """A lightweight test that can finish quickly.""" - self._setup() - flags.FLAGS.num_gpus = 1 - flags.FLAGS.data_dir = self.data_dir - flags.FLAGS.batch_size = 128 - flags.FLAGS.train_epochs = 1 - flags.FLAGS.model_dir = self._get_model_dir('unit_test') - flags.FLAGS.resnet_size = 8 - flags.FLAGS.dtype = 'fp32' - flags.FLAGS.hooks = ['ExamplesPerSecondHook'] - self._run_and_report_benchmark() - - def _run_and_report_benchmark(self): - """Executes benchmark and reports result.""" - start_time_sec = time.time() - stats = cifar_main.run_cifar(flags.FLAGS) - wall_time_sec = time.time() - start_time_sec - - self._report_benchmark(stats, - wall_time_sec, - top_1_min=0.926, - top_1_max=0.938) diff --git a/official/r1/resnet/imagenet_main.py b/official/r1/resnet/imagenet_main.py deleted file mode 100644 index 37420d334..000000000 --- a/official/r1/resnet/imagenet_main.py +++ /dev/null @@ -1,393 +0,0 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Runs a ResNet model on the ImageNet dataset.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import os - -from absl import app as absl_app -from absl import flags -from absl import logging -from six.moves import range -import tensorflow as tf - -from official.r1.resnet import imagenet_preprocessing -from official.r1.resnet import resnet_model -from official.r1.resnet import resnet_run_loop -from official.r1.utils.logs import logger -from official.utils.flags import core as flags_core - -DEFAULT_IMAGE_SIZE = 224 -NUM_CHANNELS = 3 -NUM_CLASSES = 1001 - -NUM_IMAGES = { - 'train': 1281167, - 'validation': 50000, -} - -_NUM_TRAIN_FILES = 1024 -_SHUFFLE_BUFFER = 10000 - -DATASET_NAME = 'ImageNet' - -############################################################################### -# Data processing -############################################################################### -def get_filenames(is_training, data_dir): - """Return filenames for dataset.""" - if is_training: - return [ - os.path.join(data_dir, 'train-%05d-of-01024' % i) - for i in range(_NUM_TRAIN_FILES)] - else: - return [ - os.path.join(data_dir, 'validation-%05d-of-00128' % i) - for i in range(128)] - - -def _parse_example_proto(example_serialized): - """Parses an Example proto containing a training example of an image. - - The output of the build_image_data.py image preprocessing script is a dataset - containing serialized Example protocol buffers. Each Example proto contains - the following fields (values are included as examples): - - image/height: 462 - image/width: 581 - image/colorspace: 'RGB' - image/channels: 3 - image/class/label: 615 - image/class/synset: 'n03623198' - image/class/text: 'knee pad' - image/object/bbox/xmin: 0.1 - image/object/bbox/xmax: 0.9 - image/object/bbox/ymin: 0.2 - image/object/bbox/ymax: 0.6 - image/object/bbox/label: 615 - image/format: 'JPEG' - image/filename: 'ILSVRC2012_val_00041207.JPEG' - image/encoded: - - Args: - example_serialized: scalar Tensor tf.string containing a serialized - Example protocol buffer. - - Returns: - image_buffer: Tensor tf.string containing the contents of a JPEG file. - label: Tensor tf.int32 containing the label. - bbox: 3-D float Tensor of bounding boxes arranged [1, num_boxes, coords] - where each coordinate is [0, 1) and the coordinates are arranged as - [ymin, xmin, ymax, xmax]. - """ - # Dense features in Example proto. - feature_map = { - 'image/encoded': tf.io.FixedLenFeature([], dtype=tf.string, - default_value=''), - 'image/class/label': tf.io.FixedLenFeature([], dtype=tf.int64, - default_value=-1), - 'image/class/text': tf.io.FixedLenFeature([], dtype=tf.string, - default_value=''), - } - sparse_float32 = tf.io.VarLenFeature(dtype=tf.float32) - # Sparse features in Example proto. 
- feature_map.update( - {k: sparse_float32 for k in ['image/object/bbox/xmin', - 'image/object/bbox/ymin', - 'image/object/bbox/xmax', - 'image/object/bbox/ymax']}) - - features = tf.io.parse_single_example(serialized=example_serialized, - features=feature_map) - label = tf.cast(features['image/class/label'], dtype=tf.int32) - - xmin = tf.expand_dims(features['image/object/bbox/xmin'].values, 0) - ymin = tf.expand_dims(features['image/object/bbox/ymin'].values, 0) - xmax = tf.expand_dims(features['image/object/bbox/xmax'].values, 0) - ymax = tf.expand_dims(features['image/object/bbox/ymax'].values, 0) - - # Note that we impose an ordering of (y, x) just to make life difficult. - bbox = tf.concat([ymin, xmin, ymax, xmax], 0) - - # Force the variable number of bounding boxes into the shape - # [1, num_boxes, coords]. - bbox = tf.expand_dims(bbox, 0) - bbox = tf.transpose(a=bbox, perm=[0, 2, 1]) - - return features['image/encoded'], label, bbox - - -def parse_record(raw_record, is_training, dtype): - """Parses a record containing a training example of an image. - - The input record is parsed into a label and image, and the image is passed - through preprocessing steps (cropping, flipping, and so on). - - Args: - raw_record: scalar Tensor tf.string containing a serialized - Example protocol buffer. - is_training: A boolean denoting whether the input is for training. - dtype: data type to use for images/features. - - Returns: - Tuple with processed image tensor and one-hot-encoded label tensor. - """ - image_buffer, label, bbox = _parse_example_proto(raw_record) - - image = imagenet_preprocessing.preprocess_image( - image_buffer=image_buffer, - bbox=bbox, - output_height=DEFAULT_IMAGE_SIZE, - output_width=DEFAULT_IMAGE_SIZE, - num_channels=NUM_CHANNELS, - is_training=is_training) - image = tf.cast(image, dtype) - - return image, label - - -def input_fn(is_training, - data_dir, - batch_size, - num_epochs=1, - dtype=tf.float32, - datasets_num_private_threads=None, - parse_record_fn=parse_record, - input_context=None, - drop_remainder=False, - tf_data_experimental_slack=False): - """Input function which provides batches for train or eval. - - Args: - is_training: A boolean denoting whether the input is for training. - data_dir: The directory containing the input data. - batch_size: The number of samples per batch. - num_epochs: The number of epochs to repeat the dataset. - dtype: Data type to use for images/features - datasets_num_private_threads: Number of private threads for tf.data. - parse_record_fn: Function to use for parsing the records. - input_context: A `tf.distribute.InputContext` object passed in by - `tf.distribute.Strategy`. - drop_remainder: A boolean indicates whether to drop the remainder of the - batches. If True, the batch dimension will be static. - tf_data_experimental_slack: Whether to enable tf.data's - `experimental_slack` option. - - Returns: - A dataset that can be used for iteration. - """ - filenames = get_filenames(is_training, data_dir) - dataset = tf.data.Dataset.from_tensor_slices(filenames) - - if input_context: - logging.info( - 'Sharding the dataset: input_pipeline_id=%d num_input_pipelines=%d', - input_context.input_pipeline_id, input_context.num_input_pipelines) - dataset = dataset.shard(input_context.num_input_pipelines, - input_context.input_pipeline_id) - - if is_training: - # Shuffle the input files - dataset = dataset.shuffle(buffer_size=_NUM_TRAIN_FILES) - - # Convert to individual records. 
- # cycle_length = 10 means that up to 10 files will be read and deserialized in - # parallel. You may want to increase this number if you have a large number of - # CPU cores. - dataset = dataset.interleave( - tf.data.TFRecordDataset, - cycle_length=10, - num_parallel_calls=tf.data.experimental.AUTOTUNE) - - return resnet_run_loop.process_record_dataset( - dataset=dataset, - is_training=is_training, - batch_size=batch_size, - shuffle_buffer=_SHUFFLE_BUFFER, - parse_record_fn=parse_record_fn, - num_epochs=num_epochs, - dtype=dtype, - datasets_num_private_threads=datasets_num_private_threads, - drop_remainder=drop_remainder, - tf_data_experimental_slack=tf_data_experimental_slack, - ) - - -def get_synth_input_fn(dtype): - return resnet_run_loop.get_synth_input_fn( - DEFAULT_IMAGE_SIZE, DEFAULT_IMAGE_SIZE, NUM_CHANNELS, NUM_CLASSES, - dtype=dtype) - - -############################################################################### -# Running the model -############################################################################### -class ImagenetModel(resnet_model.Model): - """Model class with appropriate defaults for Imagenet data.""" - - def __init__(self, resnet_size, data_format=None, num_classes=NUM_CLASSES, - resnet_version=resnet_model.DEFAULT_VERSION, - dtype=resnet_model.DEFAULT_DTYPE): - """These are the parameters that work for Imagenet data. - - Args: - resnet_size: The number of convolutional layers needed in the model. - data_format: Either 'channels_first' or 'channels_last', specifying which - data format to use when setting up the model. - num_classes: The number of output classes needed from the model. This - enables users to extend the same model to their own datasets. - resnet_version: Integer representing which version of the ResNet network - to use. See README for details. Valid values: [1, 2] - dtype: The TensorFlow dtype to use for calculations. - """ - - # For bigger models, we want to use "bottleneck" layers - if resnet_size < 50: - bottleneck = False - else: - bottleneck = True - - super(ImagenetModel, self).__init__( - resnet_size=resnet_size, - bottleneck=bottleneck, - num_classes=num_classes, - num_filters=64, - kernel_size=7, - conv_stride=2, - first_pool_size=3, - first_pool_stride=2, - block_sizes=_get_block_sizes(resnet_size), - block_strides=[1, 2, 2, 2], - resnet_version=resnet_version, - data_format=data_format, - dtype=dtype - ) - - -def _get_block_sizes(resnet_size): - """Retrieve the size of each block_layer in the ResNet model. - - The number of block layers used for the Resnet model varies according - to the size of the model. This helper grabs the layer set we want, throwing - an error if a non-standard size has been selected. - - Args: - resnet_size: The number of convolutional layers needed in the model. - - Returns: - A list of block sizes to use in building the model. - - Raises: - KeyError: if invalid resnet_size is received. - """ - choices = { - 18: [2, 2, 2, 2], - 34: [3, 4, 6, 3], - 50: [3, 4, 6, 3], - 101: [3, 4, 23, 3], - 152: [3, 8, 36, 3], - 200: [3, 24, 36, 3] - } - - try: - return choices[resnet_size] - except KeyError: - err = ('Could not find layers for selected Resnet size.\n' - 'Size received: {}; sizes allowed: {}.'.format( - resnet_size, list(choices.keys()))) - raise ValueError(err) - - -def imagenet_model_fn(features, labels, mode, params): - """Our model_fn for ResNet to be used with our Estimator.""" - - # Warmup and higher lr may not be valid for fine tuning with small batches - # and smaller numbers of training images. 
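[Editorial aside on `_get_block_sizes` above: the block-count lists encode the familiar ResNet depths once you count three convolutions per bottleneck block plus the stem convolution and the final dense layer. A quick arithmetic check:]

```
# For the bottleneck variants (50/101/152): 3 convs per block, plus the
# initial conv and the final dense layer, reproduces the model's nominal size.
for size, blocks in [(50, [3, 4, 6, 3]), (101, [3, 4, 23, 3]), (152, [3, 8, 36, 3])]:
  assert 3 * sum(blocks) + 2 == size
```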
- if params['fine_tune']: - warmup = False - base_lr = .1 - else: - warmup = True - base_lr = .128 - - learning_rate_fn = resnet_run_loop.learning_rate_with_decay( - batch_size=params['batch_size'] * params.get('num_workers', 1), - batch_denom=256, num_images=NUM_IMAGES['train'], - boundary_epochs=[30, 60, 80, 90], decay_rates=[1, 0.1, 0.01, 0.001, 1e-4], - warmup=warmup, base_lr=base_lr) - - return resnet_run_loop.resnet_model_fn( - features=features, - labels=labels, - mode=mode, - model_class=ImagenetModel, - resnet_size=params['resnet_size'], - weight_decay=flags.FLAGS.weight_decay, - learning_rate_fn=learning_rate_fn, - momentum=0.9, - data_format=params['data_format'], - resnet_version=params['resnet_version'], - loss_scale=params['loss_scale'], - loss_filter_fn=None, - dtype=params['dtype'], - fine_tune=params['fine_tune'], - label_smoothing=flags.FLAGS.label_smoothing - ) - - -def define_imagenet_flags(): - resnet_run_loop.define_resnet_flags( - resnet_size_choices=['18', '34', '50', '101', '152', '200'], - dynamic_loss_scale=True, - fp16_implementation=True) - flags.adopt_module_key_flags(resnet_run_loop) - flags_core.set_defaults(train_epochs=90) - - -def run_imagenet(flags_obj): - """Run ResNet ImageNet training and eval loop. - - Args: - flags_obj: An object containing parsed flag values. - - Returns: - Dict of results of the run. Contains the keys `eval_results` and - `train_hooks`. `eval_results` contains accuracy (top_1) and - accuracy_top_5. `train_hooks` is a list the instances of hooks used during - training. - """ - input_function = (flags_obj.use_synthetic_data and - get_synth_input_fn(flags_core.get_tf_dtype(flags_obj)) or - input_fn) - - result = resnet_run_loop.resnet_main( - flags_obj, imagenet_model_fn, input_function, DATASET_NAME, - shape=[DEFAULT_IMAGE_SIZE, DEFAULT_IMAGE_SIZE, NUM_CHANNELS]) - - return result - - -def main(_): - with logger.benchmark_context(flags.FLAGS): - run_imagenet(flags.FLAGS) - - -if __name__ == '__main__': - logging.set_verbosity(logging.INFO) - define_imagenet_flags() - absl_app.run(main) diff --git a/official/r1/resnet/imagenet_preprocessing.py b/official/r1/resnet/imagenet_preprocessing.py deleted file mode 100644 index 891b58ab1..000000000 --- a/official/r1/resnet/imagenet_preprocessing.py +++ /dev/null @@ -1,262 +0,0 @@ -# Copyright 2016 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Provides utilities to preprocess images. - -Training images are sampled using the provided bounding boxes, and subsequently -cropped to the sampled bounding box. Images are additionally flipped randomly, -then resized to the target output size (without aspect-ratio preservation). - -Images used during evaluation are resized (with aspect-ratio preservation) and -centrally cropped. - -All images undergo mean color subtraction. 
- -Note that these steps are colloquially referred to as "ResNet preprocessing," -and they differ from "VGG preprocessing," which does not use bounding boxes -and instead does an aspect-preserving resize followed by random crop during -training. (These both differ from "Inception preprocessing," which introduces -color distortion steps.) - -""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import tensorflow as tf - -_R_MEAN = 123.68 -_G_MEAN = 116.78 -_B_MEAN = 103.94 -_CHANNEL_MEANS = [_R_MEAN, _G_MEAN, _B_MEAN] - -# The lower bound for the smallest side of the image for aspect-preserving -# resizing. For example, if an image is 500 x 1000, it will be resized to -# _RESIZE_MIN x (_RESIZE_MIN * 2). -_RESIZE_MIN = 256 - - -def _decode_crop_and_flip(image_buffer, bbox, num_channels): - """Crops the given image to a random part of the image, and randomly flips. - - We use the fused decode_and_crop op, which performs better than the two ops - used separately in series, but note that this requires that the image be - passed in as an un-decoded string Tensor. - - Args: - image_buffer: scalar string Tensor representing the raw JPEG image buffer. - bbox: 3-D float Tensor of bounding boxes arranged [1, num_boxes, coords] - where each coordinate is [0, 1) and the coordinates are arranged as - [ymin, xmin, ymax, xmax]. - num_channels: Integer depth of the image buffer for decoding. - - Returns: - 3-D tensor with cropped image. - - """ - # A large fraction of image datasets contain a human-annotated bounding box - # delineating the region of the image containing the object of interest. We - # choose to create a new bounding box for the object which is a randomly - # distorted version of the human-annotated bounding box that obeys an - # allowed range of aspect ratios, sizes and overlap with the human-annotated - # bounding box. If no box is supplied, then we assume the bounding box is - # the entire image. - sample_distorted_bounding_box = tf.image.sample_distorted_bounding_box( - tf.image.extract_jpeg_shape(image_buffer), - bounding_boxes=bbox, - min_object_covered=0.1, - aspect_ratio_range=[0.75, 1.33], - area_range=[0.05, 1.0], - max_attempts=100, - use_image_if_no_bounding_boxes=True) - bbox_begin, bbox_size, _ = sample_distorted_bounding_box - - # Reassemble the bounding box in the format the crop op requires. - offset_y, offset_x, _ = tf.unstack(bbox_begin) - target_height, target_width, _ = tf.unstack(bbox_size) - crop_window = tf.stack([offset_y, offset_x, target_height, target_width]) - - # Use the fused decode and crop op here, which is faster than each in series. - cropped = tf.image.decode_and_crop_jpeg( - image_buffer, crop_window, channels=num_channels) - - # Flip to add a little more random distortion in. - cropped = tf.image.random_flip_left_right(cropped) - return cropped - - -def _central_crop(image, crop_height, crop_width): - """Performs central crops of the given image list. - - Args: - image: a 3-D image tensor - crop_height: the height of the image following the crop. - crop_width: the width of the image following the crop. - - Returns: - 3-D tensor with cropped image. 
- """ - shape = tf.shape(input=image) - height, width = shape[0], shape[1] - - amount_to_be_cropped_h = (height - crop_height) - crop_top = amount_to_be_cropped_h // 2 - amount_to_be_cropped_w = (width - crop_width) - crop_left = amount_to_be_cropped_w // 2 - return tf.slice( - image, [crop_top, crop_left, 0], [crop_height, crop_width, -1]) - - -def _mean_image_subtraction(image, means, num_channels): - """Subtracts the given means from each image channel. - - For example: - means = [123.68, 116.779, 103.939] - image = _mean_image_subtraction(image, means) - - Note that the rank of `image` must be known. - - Args: - image: a tensor of size [height, width, C]. - means: a C-vector of values to subtract from each channel. - num_channels: number of color channels in the image that will be distorted. - - Returns: - the centered image. - - Raises: - ValueError: If the rank of `image` is unknown, if `image` has a rank other - than three or if the number of channels in `image` doesn't match the - number of values in `means`. - """ - if image.get_shape().ndims != 3: - raise ValueError('Input must be of size [height, width, C>0]') - - if len(means) != num_channels: - raise ValueError('len(means) must match the number of channels') - - # We have a 1-D tensor of means; convert to 3-D. - # Note(b/130245863): we explicitly call `broadcast` instead of simply - # expanding dimensions for better performance. - means = tf.broadcast_to(means, tf.shape(image)) - - return image - means - - -def _smallest_size_at_least(height, width, resize_min): - """Computes new shape with the smallest side equal to `smallest_side`. - - Computes new shape with the smallest side equal to `smallest_side` while - preserving the original aspect ratio. - - Args: - height: an int32 scalar tensor indicating the current height. - width: an int32 scalar tensor indicating the current width. - resize_min: A python integer or scalar `Tensor` indicating the size of - the smallest side after resize. - - Returns: - new_height: an int32 scalar tensor indicating the new height. - new_width: an int32 scalar tensor indicating the new width. - """ - resize_min = tf.cast(resize_min, tf.float32) - - # Convert to floats to make subsequent calculations go smoothly. - height, width = tf.cast(height, tf.float32), tf.cast(width, tf.float32) - - smaller_dim = tf.minimum(height, width) - scale_ratio = resize_min / smaller_dim - - # Convert back to ints to make heights and widths that TF ops will accept. - new_height = tf.cast(height * scale_ratio, tf.int32) - new_width = tf.cast(width * scale_ratio, tf.int32) - - return new_height, new_width - - -def _aspect_preserving_resize(image, resize_min): - """Resize images preserving the original aspect ratio. - - Args: - image: A 3-D image `Tensor`. - resize_min: A python integer or scalar `Tensor` indicating the size of - the smallest side after resize. - - Returns: - resized_image: A 3-D tensor containing the resized image. - """ - shape = tf.shape(input=image) - height, width = shape[0], shape[1] - - new_height, new_width = _smallest_size_at_least(height, width, resize_min) - - return _resize_image(image, new_height, new_width) - - -def _resize_image(image, height, width): - """Simple wrapper around tf.resize_images. - - This is primarily to make sure we use the same `ResizeMethod` and other - details each time. - - Args: - image: A 3-D image `Tensor`. - height: The target height for the resized image. - width: The target width for the resized image. 
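[Editorial aside on `_mean_image_subtraction` above: the per-channel means are broadcast against the HWC image before subtraction. A minimal sketch, assuming TF 2.x:]

```
import tensorflow as tf

image = tf.fill([2, 2, 3], 150.0)
means = tf.constant([123.68, 116.78, 103.94])
centered = image - tf.broadcast_to(means, tf.shape(image))
print(centered[0, 0].numpy())  # approximately [26.32, 33.22, 46.06]
```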
- - Returns: - resized_image: A 3-D tensor containing the resized image. The first two - dimensions have the shape [height, width]. - """ - return tf.compat.v1.image.resize( - image, [height, width], method=tf.image.ResizeMethod.BILINEAR, - align_corners=False) - - -def preprocess_image(image_buffer, bbox, output_height, output_width, - num_channels, is_training=False): - """Preprocesses the given image. - - Preprocessing includes decoding, cropping, and resizing for both training - and eval images. Training preprocessing, however, introduces some random - distortion of the image to improve accuracy. - - Args: - image_buffer: scalar string Tensor representing the raw JPEG image buffer. - bbox: 3-D float Tensor of bounding boxes arranged [1, num_boxes, coords] - where each coordinate is [0, 1) and the coordinates are arranged as - [ymin, xmin, ymax, xmax]. - output_height: The height of the image after preprocessing. - output_width: The width of the image after preprocessing. - num_channels: Integer depth of the image buffer for decoding. - is_training: `True` if we're preprocessing the image for training and - `False` otherwise. - - Returns: - A preprocessed image. - """ - if is_training: - # For training, we want to randomize some of the distortions. - image = _decode_crop_and_flip(image_buffer, bbox, num_channels) - image = _resize_image(image, output_height, output_width) - else: - # For validation, we want to decode, resize, then just crop the middle. - image = tf.image.decode_jpeg(image_buffer, channels=num_channels) - image = _aspect_preserving_resize(image, _RESIZE_MIN) - image = _central_crop(image, output_height, output_width) - - image.set_shape([output_height, output_width, num_channels]) - - return _mean_image_subtraction(image, _CHANNEL_MEANS, num_channels) diff --git a/official/r1/resnet/imagenet_test.py b/official/r1/resnet/imagenet_test.py deleted file mode 100644 index c25cafb85..000000000 --- a/official/r1/resnet/imagenet_test.py +++ /dev/null @@ -1,325 +0,0 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================== - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import unittest - -import tensorflow as tf # pylint: disable=g-bad-import-order -from absl import logging - -from official.r1.resnet import imagenet_main -from official.utils.testing import integration - -logging.set_verbosity(logging.ERROR) - -_BATCH_SIZE = 32 -_LABEL_CLASSES = 1001 - - -class BaseTest(tf.test.TestCase): - - _num_validation_images = None - - @classmethod - def setUpClass(cls): # pylint: disable=invalid-name - super(BaseTest, cls).setUpClass() - imagenet_main.define_imagenet_flags() - - def setUp(self): - super(BaseTest, self).setUp() - tf.compat.v1.disable_eager_execution() - self._num_validation_images = imagenet_main.NUM_IMAGES['validation'] - imagenet_main.NUM_IMAGES['validation'] = 4 - - def tearDown(self): - super(BaseTest, self).tearDown() - tf.io.gfile.rmtree(self.get_temp_dir()) - imagenet_main.NUM_IMAGES['validation'] = self._num_validation_images - - def _tensor_shapes_helper(self, resnet_size, resnet_version, dtype, with_gpu): - """Checks the tensor shapes after each phase of the ResNet model.""" - def reshape(shape): - """Returns the expected dimensions depending on if a GPU is being used.""" - - # If a GPU is used for the test, the shape is returned (already in NCHW - # form). When GPU is not used, the shape is converted to NHWC. - if with_gpu: - return shape - return shape[0], shape[2], shape[3], shape[1] - - graph = tf.Graph() - - with graph.as_default(), self.test_session( - graph=graph, use_gpu=with_gpu, force_gpu=with_gpu): - model = imagenet_main.ImagenetModel( - resnet_size=resnet_size, - data_format='channels_first' if with_gpu else 'channels_last', - resnet_version=resnet_version, - dtype=dtype - ) - inputs = tf.random.uniform([1, 224, 224, 3]) - output = model(inputs, training=True) - - initial_conv = graph.get_tensor_by_name('resnet_model/initial_conv:0') - max_pool = graph.get_tensor_by_name('resnet_model/initial_max_pool:0') - block_layer1 = graph.get_tensor_by_name('resnet_model/block_layer1:0') - block_layer2 = graph.get_tensor_by_name('resnet_model/block_layer2:0') - block_layer3 = graph.get_tensor_by_name('resnet_model/block_layer3:0') - block_layer4 = graph.get_tensor_by_name('resnet_model/block_layer4:0') - reduce_mean = graph.get_tensor_by_name('resnet_model/final_reduce_mean:0') - dense = graph.get_tensor_by_name('resnet_model/final_dense:0') - - self.assertAllEqual(initial_conv.shape, reshape((1, 64, 112, 112))) - self.assertAllEqual(max_pool.shape, reshape((1, 64, 56, 56))) - - # The number of channels after each block depends on whether we're - # using the building_block or the bottleneck_block. 
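[Editorial aside: the `reshape()` helper above converts the expected NCHW shapes to NHWC for the CPU case; it is just an axis permutation, e.g.:]

```
def to_nhwc(shape):
  # Mirrors the reshape() helper above for the channels_last (no-GPU) case.
  return shape[0], shape[2], shape[3], shape[1]

print(to_nhwc((1, 64, 112, 112)))  # (1, 112, 112, 64)
```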
- if resnet_size < 50: - self.assertAllEqual(block_layer1.shape, reshape((1, 64, 56, 56))) - self.assertAllEqual(block_layer2.shape, reshape((1, 128, 28, 28))) - self.assertAllEqual(block_layer3.shape, reshape((1, 256, 14, 14))) - self.assertAllEqual(block_layer4.shape, reshape((1, 512, 7, 7))) - self.assertAllEqual(reduce_mean.shape, reshape((1, 512, 1, 1))) - else: - self.assertAllEqual(block_layer1.shape, reshape((1, 256, 56, 56))) - self.assertAllEqual(block_layer2.shape, reshape((1, 512, 28, 28))) - self.assertAllEqual(block_layer3.shape, reshape((1, 1024, 14, 14))) - self.assertAllEqual(block_layer4.shape, reshape((1, 2048, 7, 7))) - self.assertAllEqual(reduce_mean.shape, reshape((1, 2048, 1, 1))) - - self.assertAllEqual(dense.shape, (1, _LABEL_CLASSES)) - self.assertAllEqual(output.shape, (1, _LABEL_CLASSES)) - - def tensor_shapes_helper(self, resnet_size, resnet_version, with_gpu=False): - self._tensor_shapes_helper(resnet_size=resnet_size, - resnet_version=resnet_version, - dtype=tf.float32, with_gpu=with_gpu) - self._tensor_shapes_helper(resnet_size=resnet_size, - resnet_version=resnet_version, - dtype=tf.float16, with_gpu=with_gpu) - - def test_tensor_shapes_resnet_18_v1(self): - self.tensor_shapes_helper(18, resnet_version=1) - - def test_tensor_shapes_resnet_18_v2(self): - self.tensor_shapes_helper(18, resnet_version=2) - - def test_tensor_shapes_resnet_34_v1(self): - self.tensor_shapes_helper(34, resnet_version=1) - - def test_tensor_shapes_resnet_34_v2(self): - self.tensor_shapes_helper(34, resnet_version=2) - - def test_tensor_shapes_resnet_50_v1(self): - self.tensor_shapes_helper(50, resnet_version=1) - - def test_tensor_shapes_resnet_50_v2(self): - self.tensor_shapes_helper(50, resnet_version=2) - - def test_tensor_shapes_resnet_101_v1(self): - self.tensor_shapes_helper(101, resnet_version=1) - - def test_tensor_shapes_resnet_101_v2(self): - self.tensor_shapes_helper(101, resnet_version=2) - - def test_tensor_shapes_resnet_152_v1(self): - self.tensor_shapes_helper(152, resnet_version=1) - - def test_tensor_shapes_resnet_152_v2(self): - self.tensor_shapes_helper(152, resnet_version=2) - - def test_tensor_shapes_resnet_200_v1(self): - self.tensor_shapes_helper(200, resnet_version=1) - - def test_tensor_shapes_resnet_200_v2(self): - self.tensor_shapes_helper(200, resnet_version=2) - - @unittest.skipUnless(tf.test.is_built_with_cuda(), 'requires GPU') - def test_tensor_shapes_resnet_18_with_gpu_v1(self): - self.tensor_shapes_helper(18, resnet_version=1, with_gpu=True) - - @unittest.skipUnless(tf.test.is_built_with_cuda(), 'requires GPU') - def test_tensor_shapes_resnet_18_with_gpu_v2(self): - self.tensor_shapes_helper(18, resnet_version=2, with_gpu=True) - - @unittest.skipUnless(tf.test.is_built_with_cuda(), 'requires GPU') - def test_tensor_shapes_resnet_34_with_gpu_v1(self): - self.tensor_shapes_helper(34, resnet_version=1, with_gpu=True) - - @unittest.skipUnless(tf.test.is_built_with_cuda(), 'requires GPU') - def test_tensor_shapes_resnet_34_with_gpu_v2(self): - self.tensor_shapes_helper(34, resnet_version=2, with_gpu=True) - - @unittest.skipUnless(tf.test.is_built_with_cuda(), 'requires GPU') - def test_tensor_shapes_resnet_50_with_gpu_v1(self): - self.tensor_shapes_helper(50, resnet_version=1, with_gpu=True) - - @unittest.skipUnless(tf.test.is_built_with_cuda(), 'requires GPU') - def test_tensor_shapes_resnet_50_with_gpu_v2(self): - self.tensor_shapes_helper(50, resnet_version=2, with_gpu=True) - - @unittest.skipUnless(tf.test.is_built_with_cuda(), 'requires GPU') - def 
test_tensor_shapes_resnet_101_with_gpu_v1(self): - self.tensor_shapes_helper(101, resnet_version=1, with_gpu=True) - - @unittest.skipUnless(tf.test.is_built_with_cuda(), 'requires GPU') - def test_tensor_shapes_resnet_101_with_gpu_v2(self): - self.tensor_shapes_helper(101, resnet_version=2, with_gpu=True) - - @unittest.skipUnless(tf.test.is_built_with_cuda(), 'requires GPU') - def test_tensor_shapes_resnet_152_with_gpu_v1(self): - self.tensor_shapes_helper(152, resnet_version=1, with_gpu=True) - - @unittest.skipUnless(tf.test.is_built_with_cuda(), 'requires GPU') - def test_tensor_shapes_resnet_152_with_gpu_v2(self): - self.tensor_shapes_helper(152, resnet_version=2, with_gpu=True) - - @unittest.skipUnless(tf.test.is_built_with_cuda(), 'requires GPU') - def test_tensor_shapes_resnet_200_with_gpu_v1(self): - self.tensor_shapes_helper(200, resnet_version=1, with_gpu=True) - - @unittest.skipUnless(tf.test.is_built_with_cuda(), 'requires GPU') - def test_tensor_shapes_resnet_200_with_gpu_v2(self): - self.tensor_shapes_helper(200, resnet_version=2, with_gpu=True) - - def resnet_model_fn_helper(self, mode, resnet_version, dtype): - """Tests that the EstimatorSpec is given the appropriate arguments.""" - tf.compat.v1.train.create_global_step() - - input_fn = imagenet_main.get_synth_input_fn(dtype) - dataset = input_fn(True, '', _BATCH_SIZE) - iterator = tf.compat.v1.data.make_initializable_iterator(dataset) - features, labels = iterator.get_next() - spec = imagenet_main.imagenet_model_fn( - features, labels, mode, { - 'dtype': dtype, - 'resnet_size': 50, - 'data_format': 'channels_last', - 'batch_size': _BATCH_SIZE, - 'resnet_version': resnet_version, - 'loss_scale': 128 if dtype == tf.float16 else 1, - 'fine_tune': False, - }) - - predictions = spec.predictions - self.assertAllEqual(predictions['probabilities'].shape, - (_BATCH_SIZE, _LABEL_CLASSES)) - self.assertEqual(predictions['probabilities'].dtype, tf.float32) - self.assertAllEqual(predictions['classes'].shape, (_BATCH_SIZE,)) - self.assertEqual(predictions['classes'].dtype, tf.int64) - - if mode != tf.estimator.ModeKeys.PREDICT: - loss = spec.loss - self.assertAllEqual(loss.shape, ()) - self.assertEqual(loss.dtype, tf.float32) - - if mode == tf.estimator.ModeKeys.EVAL: - eval_metric_ops = spec.eval_metric_ops - self.assertAllEqual(eval_metric_ops['accuracy'][0].shape, ()) - self.assertAllEqual(eval_metric_ops['accuracy'][1].shape, ()) - self.assertEqual(eval_metric_ops['accuracy'][0].dtype, tf.float32) - self.assertEqual(eval_metric_ops['accuracy'][1].dtype, tf.float32) - - def test_resnet_model_fn_train_mode_v1(self): - self.resnet_model_fn_helper(tf.estimator.ModeKeys.TRAIN, resnet_version=1, - dtype=tf.float32) - - def test_resnet_model_fn_train_mode_v2(self): - self.resnet_model_fn_helper(tf.estimator.ModeKeys.TRAIN, resnet_version=2, - dtype=tf.float32) - - def test_resnet_model_fn_eval_mode_v1(self): - self.resnet_model_fn_helper(tf.estimator.ModeKeys.EVAL, resnet_version=1, - dtype=tf.float32) - - def test_resnet_model_fn_eval_mode_v2(self): - self.resnet_model_fn_helper(tf.estimator.ModeKeys.EVAL, resnet_version=2, - dtype=tf.float32) - - def test_resnet_model_fn_predict_mode_v1(self): - self.resnet_model_fn_helper(tf.estimator.ModeKeys.PREDICT, resnet_version=1, - dtype=tf.float32) - - def test_resnet_model_fn_predict_mode_v2(self): - self.resnet_model_fn_helper(tf.estimator.ModeKeys.PREDICT, resnet_version=2, - dtype=tf.float32) - - def _test_imagenetmodel_shape(self, resnet_version): - batch_size = 135 - num_classes = 246 - - 
model = imagenet_main.ImagenetModel( - 50, data_format='channels_last', num_classes=num_classes, - resnet_version=resnet_version) - - fake_input = tf.random.uniform([batch_size, 224, 224, 3]) - output = model(fake_input, training=True) - - self.assertAllEqual(output.shape, (batch_size, num_classes)) - - def test_imagenetmodel_shape_v1(self): - self._test_imagenetmodel_shape(resnet_version=1) - - def test_imagenetmodel_shape_v2(self): - self._test_imagenetmodel_shape(resnet_version=2) - - def test_imagenet_end_to_end_synthetic_v1(self): - integration.run_synthetic( - main=imagenet_main.run_imagenet, tmp_root=self.get_temp_dir(), - extra_flags=['-resnet_version', '1', '-batch_size', '4', - '--max_train_steps', '1'] - ) - - def test_imagenet_end_to_end_synthetic_v2(self): - integration.run_synthetic( - main=imagenet_main.run_imagenet, tmp_root=self.get_temp_dir(), - extra_flags=['-resnet_version', '2', '-batch_size', '4', - '--max_train_steps', '1'] - ) - - def test_imagenet_end_to_end_synthetic_v1_tiny(self): - integration.run_synthetic( - main=imagenet_main.run_imagenet, tmp_root=self.get_temp_dir(), - extra_flags=['-resnet_version', '1', '-batch_size', '4', - '-resnet_size', '18', '--max_train_steps', '1'] - ) - - def test_imagenet_end_to_end_synthetic_v2_tiny(self): - integration.run_synthetic( - main=imagenet_main.run_imagenet, tmp_root=self.get_temp_dir(), - extra_flags=['-resnet_version', '2', '-batch_size', '4', - '-resnet_size', '18', '--max_train_steps', '1'] - ) - - def test_imagenet_end_to_end_synthetic_v1_huge(self): - integration.run_synthetic( - main=imagenet_main.run_imagenet, tmp_root=self.get_temp_dir(), - extra_flags=['-resnet_version', '1', '-batch_size', '4', - '-resnet_size', '200', '--max_train_steps', '1'] - ) - - def test_imagenet_end_to_end_synthetic_v2_huge(self): - integration.run_synthetic( - main=imagenet_main.run_imagenet, tmp_root=self.get_temp_dir(), - extra_flags=['-resnet_version', '2', '-batch_size', '4', - '-resnet_size', '200', '--max_train_steps', '1'] - ) - - -if __name__ == '__main__': - tf.test.main() diff --git a/official/r1/resnet/resnet_model.py b/official/r1/resnet/resnet_model.py deleted file mode 100644 index d6449df51..000000000 --- a/official/r1/resnet/resnet_model.py +++ /dev/null @@ -1,548 +0,0 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Contains definitions for Residual Networks. - -Residual networks ('v1' ResNets) were originally proposed in: -[1] Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun - Deep Residual Learning for Image Recognition. arXiv:1512.03385 - -The full preactivation 'v2' ResNet variant was introduced by: -[2] Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun - Identity Mappings in Deep Residual Networks. 
arXiv: 1603.05027 - -The key difference of the full preactivation 'v2' variant compared to the -'v1' variant in [1] is the use of batch normalization before every weight layer -rather than after. -""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import tensorflow as tf - -_BATCH_NORM_DECAY = 0.997 -_BATCH_NORM_EPSILON = 1e-5 -DEFAULT_VERSION = 2 -DEFAULT_DTYPE = tf.float32 -CASTABLE_TYPES = (tf.float16,) -ALLOWED_TYPES = (DEFAULT_DTYPE,) + CASTABLE_TYPES - - -################################################################################ -# Convenience functions for building the ResNet model. -################################################################################ -def batch_norm(inputs, training, data_format): - """Performs a batch normalization using a standard set of parameters.""" - # We set fused=True for a significant performance boost. See - # https://www.tensorflow.org/performance/performance_guide#common_fused_ops - return tf.compat.v1.layers.batch_normalization( - inputs=inputs, axis=1 if data_format == 'channels_first' else 3, - momentum=_BATCH_NORM_DECAY, epsilon=_BATCH_NORM_EPSILON, center=True, - scale=True, training=training, fused=True) - - -def fixed_padding(inputs, kernel_size, data_format): - """Pads the input along the spatial dimensions independently of input size. - - Args: - inputs: A tensor of size [batch, channels, height_in, width_in] or - [batch, height_in, width_in, channels] depending on data_format. - kernel_size: The kernel to be used in the conv2d or max_pool2d operation. - Should be a positive integer. - data_format: The input format ('channels_last' or 'channels_first'). - - Returns: - A tensor with the same format as the input with the data either intact - (if kernel_size == 1) or padded (if kernel_size > 1). - """ - pad_total = kernel_size - 1 - pad_beg = pad_total // 2 - pad_end = pad_total - pad_beg - - if data_format == 'channels_first': - padded_inputs = tf.pad(tensor=inputs, - paddings=[[0, 0], [0, 0], [pad_beg, pad_end], - [pad_beg, pad_end]]) - else: - padded_inputs = tf.pad(tensor=inputs, - paddings=[[0, 0], [pad_beg, pad_end], - [pad_beg, pad_end], [0, 0]]) - return padded_inputs - - -def conv2d_fixed_padding(inputs, filters, kernel_size, strides, data_format): - """Strided 2-D convolution with explicit padding.""" - # The padding is consistent and is based only on `kernel_size`, not on the - # dimensions of `inputs` (as opposed to using `tf.layers.conv2d` alone). - if strides > 1: - inputs = fixed_padding(inputs, kernel_size, data_format) - - return tf.compat.v1.layers.conv2d( - inputs=inputs, filters=filters, kernel_size=kernel_size, strides=strides, - padding=('SAME' if strides == 1 else 'VALID'), use_bias=False, - kernel_initializer=tf.compat.v1.variance_scaling_initializer(), - data_format=data_format) - - -################################################################################ -# ResNet block definitions. -################################################################################ -def _building_block_v1(inputs, filters, training, projection_shortcut, strides, - data_format): - """A single block for ResNet v1, without a bottleneck. - - Convolution then batch normalization then ReLU as described by: - Deep Residual Learning for Image Recognition - https://arxiv.org/pdf/1512.03385.pdf - by Kaiming He, Xiangyu Zhang, Shaoqing Ren, and Jian Sun, Dec 2015. 
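[Editorial aside on `fixed_padding`/`conv2d_fixed_padding` above: a short worked example of the padding arithmetic for the 7x7, stride-2 stem convolution of the ImageNet model:]

```
kernel_size, stride, size = 7, 2, 224         # the initial ImageNet conv
pad_total = kernel_size - 1
pad_beg = pad_total // 2                      # 3
pad_end = pad_total - pad_beg                 # 3
padded = size + pad_beg + pad_end             # 230
print((padded - kernel_size) // stride + 1)   # 112, the 'initial_conv' spatial size
```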
- - Args: - inputs: A tensor of size [batch, channels, height_in, width_in] or - [batch, height_in, width_in, channels] depending on data_format. - filters: The number of filters for the convolutions. - training: A Boolean for whether the model is in training or inference - mode. Needed for batch normalization. - projection_shortcut: The function to use for projection shortcuts - (typically a 1x1 convolution when downsampling the input). - strides: The block's stride. If greater than 1, this block will ultimately - downsample the input. - data_format: The input format ('channels_last' or 'channels_first'). - - Returns: - The output tensor of the block; shape should match inputs. - """ - shortcut = inputs - - if projection_shortcut is not None: - shortcut = projection_shortcut(inputs) - shortcut = batch_norm(inputs=shortcut, training=training, - data_format=data_format) - - inputs = conv2d_fixed_padding( - inputs=inputs, filters=filters, kernel_size=3, strides=strides, - data_format=data_format) - inputs = batch_norm(inputs, training, data_format) - inputs = tf.nn.relu(inputs) - - inputs = conv2d_fixed_padding( - inputs=inputs, filters=filters, kernel_size=3, strides=1, - data_format=data_format) - inputs = batch_norm(inputs, training, data_format) - inputs += shortcut - inputs = tf.nn.relu(inputs) - - return inputs - - -def _building_block_v2(inputs, filters, training, projection_shortcut, strides, - data_format): - """A single block for ResNet v2, without a bottleneck. - - Batch normalization then ReLu then convolution as described by: - Identity Mappings in Deep Residual Networks - https://arxiv.org/pdf/1603.05027.pdf - by Kaiming He, Xiangyu Zhang, Shaoqing Ren, and Jian Sun, Jul 2016. - - Args: - inputs: A tensor of size [batch, channels, height_in, width_in] or - [batch, height_in, width_in, channels] depending on data_format. - filters: The number of filters for the convolutions. - training: A Boolean for whether the model is in training or inference - mode. Needed for batch normalization. - projection_shortcut: The function to use for projection shortcuts - (typically a 1x1 convolution when downsampling the input). - strides: The block's stride. If greater than 1, this block will ultimately - downsample the input. - data_format: The input format ('channels_last' or 'channels_first'). - - Returns: - The output tensor of the block; shape should match inputs. - """ - shortcut = inputs - inputs = batch_norm(inputs, training, data_format) - inputs = tf.nn.relu(inputs) - - # The projection shortcut should come after the first batch norm and ReLU - # since it performs a 1x1 convolution. - if projection_shortcut is not None: - shortcut = projection_shortcut(inputs) - - inputs = conv2d_fixed_padding( - inputs=inputs, filters=filters, kernel_size=3, strides=strides, - data_format=data_format) - - inputs = batch_norm(inputs, training, data_format) - inputs = tf.nn.relu(inputs) - inputs = conv2d_fixed_padding( - inputs=inputs, filters=filters, kernel_size=3, strides=1, - data_format=data_format) - - return inputs + shortcut - - -def _bottleneck_block_v1(inputs, filters, training, projection_shortcut, - strides, data_format): - """A single block for ResNet v1, with a bottleneck. - - Similar to _building_block_v1(), except using the "bottleneck" blocks - described in: - Convolution then batch normalization then ReLU as described by: - Deep Residual Learning for Image Recognition - https://arxiv.org/pdf/1512.03385.pdf - by Kaiming He, Xiangyu Zhang, Shaoqing Ren, and Jian Sun, Dec 2015. 
- - Args: - inputs: A tensor of size [batch, channels, height_in, width_in] or - [batch, height_in, width_in, channels] depending on data_format. - filters: The number of filters for the convolutions. - training: A Boolean for whether the model is in training or inference - mode. Needed for batch normalization. - projection_shortcut: The function to use for projection shortcuts - (typically a 1x1 convolution when downsampling the input). - strides: The block's stride. If greater than 1, this block will ultimately - downsample the input. - data_format: The input format ('channels_last' or 'channels_first'). - - Returns: - The output tensor of the block; shape should match inputs. - """ - shortcut = inputs - - if projection_shortcut is not None: - shortcut = projection_shortcut(inputs) - shortcut = batch_norm(inputs=shortcut, training=training, - data_format=data_format) - - inputs = conv2d_fixed_padding( - inputs=inputs, filters=filters, kernel_size=1, strides=1, - data_format=data_format) - inputs = batch_norm(inputs, training, data_format) - inputs = tf.nn.relu(inputs) - - inputs = conv2d_fixed_padding( - inputs=inputs, filters=filters, kernel_size=3, strides=strides, - data_format=data_format) - inputs = batch_norm(inputs, training, data_format) - inputs = tf.nn.relu(inputs) - - inputs = conv2d_fixed_padding( - inputs=inputs, filters=4 * filters, kernel_size=1, strides=1, - data_format=data_format) - inputs = batch_norm(inputs, training, data_format) - inputs += shortcut - inputs = tf.nn.relu(inputs) - - return inputs - - -def _bottleneck_block_v2(inputs, filters, training, projection_shortcut, - strides, data_format): - """A single block for ResNet v2, with a bottleneck. - - Similar to _building_block_v2(), except using the "bottleneck" blocks - described in: - Convolution then batch normalization then ReLU as described by: - Deep Residual Learning for Image Recognition - https://arxiv.org/pdf/1512.03385.pdf - by Kaiming He, Xiangyu Zhang, Shaoqing Ren, and Jian Sun, Dec 2015. - - Adapted to the ordering conventions of: - Batch normalization then ReLu then convolution as described by: - Identity Mappings in Deep Residual Networks - https://arxiv.org/pdf/1603.05027.pdf - by Kaiming He, Xiangyu Zhang, Shaoqing Ren, and Jian Sun, Jul 2016. - - Args: - inputs: A tensor of size [batch, channels, height_in, width_in] or - [batch, height_in, width_in, channels] depending on data_format. - filters: The number of filters for the convolutions. - training: A Boolean for whether the model is in training or inference - mode. Needed for batch normalization. - projection_shortcut: The function to use for projection shortcuts - (typically a 1x1 convolution when downsampling the input). - strides: The block's stride. If greater than 1, this block will ultimately - downsample the input. - data_format: The input format ('channels_last' or 'channels_first'). - - Returns: - The output tensor of the block; shape should match inputs. - """ - shortcut = inputs - inputs = batch_norm(inputs, training, data_format) - inputs = tf.nn.relu(inputs) - - # The projection shortcut should come after the first batch norm and ReLU - # since it performs a 1x1 convolution. 
- if projection_shortcut is not None: - shortcut = projection_shortcut(inputs) - - inputs = conv2d_fixed_padding( - inputs=inputs, filters=filters, kernel_size=1, strides=1, - data_format=data_format) - - inputs = batch_norm(inputs, training, data_format) - inputs = tf.nn.relu(inputs) - inputs = conv2d_fixed_padding( - inputs=inputs, filters=filters, kernel_size=3, strides=strides, - data_format=data_format) - - inputs = batch_norm(inputs, training, data_format) - inputs = tf.nn.relu(inputs) - inputs = conv2d_fixed_padding( - inputs=inputs, filters=4 * filters, kernel_size=1, strides=1, - data_format=data_format) - - return inputs + shortcut - - -def block_layer(inputs, filters, bottleneck, block_fn, blocks, strides, - training, name, data_format): - """Creates one layer of blocks for the ResNet model. - - Args: - inputs: A tensor of size [batch, channels, height_in, width_in] or - [batch, height_in, width_in, channels] depending on data_format. - filters: The number of filters for the first convolution of the layer. - bottleneck: Is the block created a bottleneck block. - block_fn: The block to use within the model, either `building_block` or - `bottleneck_block`. - blocks: The number of blocks contained in the layer. - strides: The stride to use for the first convolution of the layer. If - greater than 1, this layer will ultimately downsample the input. - training: Either True or False, whether we are currently training the - model. Needed for batch norm. - name: A string name for the tensor output of the block layer. - data_format: The input format ('channels_last' or 'channels_first'). - - Returns: - The output tensor of the block layer. - """ - - # Bottleneck blocks end with 4x the number of filters as they start with - filters_out = filters * 4 if bottleneck else filters - - def projection_shortcut(inputs): - return conv2d_fixed_padding( - inputs=inputs, filters=filters_out, kernel_size=1, strides=strides, - data_format=data_format) - - # Only the first block per block_layer uses projection_shortcut and strides - inputs = block_fn(inputs, filters, training, projection_shortcut, strides, - data_format) - - for _ in range(1, blocks): - inputs = block_fn(inputs, filters, training, None, 1, data_format) - - return tf.identity(inputs, name) - - -class Model(object): - """Base class for building the Resnet Model.""" - - def __init__(self, resnet_size, bottleneck, num_classes, num_filters, - kernel_size, - conv_stride, first_pool_size, first_pool_stride, - block_sizes, block_strides, - resnet_version=DEFAULT_VERSION, data_format=None, - dtype=DEFAULT_DTYPE): - """Creates a model for classifying an image. - - Args: - resnet_size: A single integer for the size of the ResNet model. - bottleneck: Use regular blocks or bottleneck blocks. - num_classes: The number of classes used as labels. - num_filters: The number of filters to use for the first block layer - of the model. This number is then doubled for each subsequent block - layer. - kernel_size: The kernel size to use for convolution. - conv_stride: stride size for the initial convolutional layer - first_pool_size: Pool size to be used for the first pooling layer. - If none, the first pooling layer is skipped. - first_pool_stride: stride size for the first pooling layer. Not used - if first_pool_size is None. - block_sizes: A list containing n values, where n is the number of sets of - block layers desired. Each value should be the number of blocks in the - i-th set. 
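[Editorial aside on `block_layer` above: for the bottleneck variants each layer ends with four times its nominal filter count, which is where the 256/512/1024/2048 channel counts asserted in imagenet_test.py come from:]

```
num_filters = 64                      # first block layer of the ImageNet model
for i in range(4):
  filters = num_filters * (2 ** i)
  print('block_layer%d:' % (i + 1), filters * 4)  # 256, 512, 1024, 2048 channels
```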
- block_strides: List of integers representing the desired stride size for - each of the sets of block layers. Should be same length as block_sizes. - resnet_version: Integer representing which version of the ResNet network - to use. See README for details. Valid values: [1, 2] - data_format: Input format ('channels_last', 'channels_first', or None). - If set to None, the format is dependent on whether a GPU is available. - dtype: The TensorFlow dtype to use for calculations. If not specified - tf.float32 is used. - - Raises: - ValueError: if invalid version is selected. - """ - self.resnet_size = resnet_size - - if not data_format: - data_format = ('channels_first' if tf.config.list_physical_devices('GPU') - else 'channels_last') - - self.resnet_version = resnet_version - if resnet_version not in (1, 2): - raise ValueError( - 'Resnet version should be 1 or 2. See README for citations.') - - self.bottleneck = bottleneck - if bottleneck: - if resnet_version == 1: - self.block_fn = _bottleneck_block_v1 - else: - self.block_fn = _bottleneck_block_v2 - else: - if resnet_version == 1: - self.block_fn = _building_block_v1 - else: - self.block_fn = _building_block_v2 - - if dtype not in ALLOWED_TYPES: - raise ValueError('dtype must be one of: {}'.format(ALLOWED_TYPES)) - - self.data_format = data_format - self.num_classes = num_classes - self.num_filters = num_filters - self.kernel_size = kernel_size - self.conv_stride = conv_stride - self.first_pool_size = first_pool_size - self.first_pool_stride = first_pool_stride - self.block_sizes = block_sizes - self.block_strides = block_strides - self.dtype = dtype - self.pre_activation = resnet_version == 2 - - def _custom_dtype_getter(self, getter, name, shape=None, dtype=DEFAULT_DTYPE, - *args, **kwargs): - """Creates variables in fp32, then casts to fp16 if necessary. - - This function is a custom getter. A custom getter is a function with the - same signature as tf.get_variable, except it has an additional getter - parameter. Custom getters can be passed as the `custom_getter` parameter of - tf.variable_scope. Then, tf.get_variable will call the custom getter, - instead of directly getting a variable itself. This can be used to change - the types of variables that are retrieved with tf.get_variable. - The `getter` parameter is the underlying variable getter, that would have - been called if no custom getter was used. Custom getters typically get a - variable with `getter`, then modify it in some way. - - This custom getter will create an fp32 variable. If a low precision - (e.g. float16) variable was requested it will then cast the variable to the - requested dtype. The reason we do not directly create variables in low - precision dtypes is that applying small gradients to such variables may - cause the variable not to change. - - Args: - getter: The underlying variable getter, that has the same signature as - tf.get_variable and returns a variable. - name: The name of the variable to get. - shape: The shape of the variable to get. - dtype: The dtype of the variable to get. Note that if this is a low - precision dtype, the variable will be created as a tf.float32 variable, - then cast to the appropriate dtype - *args: Additional arguments to pass unmodified to getter. - **kwargs: Additional keyword arguments to pass unmodified to getter. - - Returns: - A variable which is cast to fp16 if necessary. 
- """ - - if dtype in CASTABLE_TYPES: - var = getter(name, shape, tf.float32, *args, **kwargs) - return tf.cast(var, dtype=dtype, name=name + '_cast') - else: - return getter(name, shape, dtype, *args, **kwargs) - - def _model_variable_scope(self): - """Returns a variable scope that the model should be created under. - - If self.dtype is a castable type, model variable will be created in fp32 - then cast to self.dtype before being used. - - Returns: - A variable scope for the model. - """ - - return tf.compat.v1.variable_scope('resnet_model', - custom_getter=self._custom_dtype_getter) - - def __call__(self, inputs, training): - """Add operations to classify a batch of input images. - - Args: - inputs: A Tensor representing a batch of input images. - training: A boolean. Set to True to add operations required only when - training the classifier. - - Returns: - A logits Tensor with shape [, self.num_classes]. - """ - - with self._model_variable_scope(): - if self.data_format == 'channels_first': - # Convert the inputs from channels_last (NHWC) to channels_first (NCHW). - # This provides a large performance boost on GPU. See - # https://www.tensorflow.org/performance/performance_guide#data_formats - inputs = tf.transpose(a=inputs, perm=[0, 3, 1, 2]) - - inputs = conv2d_fixed_padding( - inputs=inputs, filters=self.num_filters, kernel_size=self.kernel_size, - strides=self.conv_stride, data_format=self.data_format) - inputs = tf.identity(inputs, 'initial_conv') - - # We do not include batch normalization or activation functions in V2 - # for the initial conv1 because the first ResNet unit will perform these - # for both the shortcut and non-shortcut paths as part of the first - # block's projection. Cf. Appendix of [2]. - if self.resnet_version == 1: - inputs = batch_norm(inputs, training, self.data_format) - inputs = tf.nn.relu(inputs) - - if self.first_pool_size: - inputs = tf.compat.v1.layers.max_pooling2d( - inputs=inputs, pool_size=self.first_pool_size, - strides=self.first_pool_stride, padding='SAME', - data_format=self.data_format) - inputs = tf.identity(inputs, 'initial_max_pool') - - for i, num_blocks in enumerate(self.block_sizes): - num_filters = self.num_filters * (2**i) - inputs = block_layer( - inputs=inputs, filters=num_filters, bottleneck=self.bottleneck, - block_fn=self.block_fn, blocks=num_blocks, - strides=self.block_strides[i], training=training, - name='block_layer{}'.format(i + 1), data_format=self.data_format) - - # Only apply the BN and ReLU for model that does pre_activation in each - # building/bottleneck block, eg resnet V2. - if self.pre_activation: - inputs = batch_norm(inputs, training, self.data_format) - inputs = tf.nn.relu(inputs) - - # The current top layer has shape - # `batch_size x pool_size x pool_size x final_size`. - # ResNet does an Average Pooling layer over pool_size, - # but that is the same as doing a reduce_mean. We do a reduce_mean - # here because it performs better than AveragePooling2D. 
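[Editorial sketch making the comment above concrete for the channels_last case: reducing over the spatial axes is exactly a global average pool. Assumes TF 2.x:]

```
import tensorflow as tf

x = tf.random.uniform([1, 7, 7, 2048])                   # channels_last features
pooled = tf.reduce_mean(x, axis=[1, 2], keepdims=True)   # global average pooling
print(pooled.shape)                                      # (1, 1, 1, 2048)
```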
- axes = [2, 3] if self.data_format == 'channels_first' else [1, 2] - inputs = tf.reduce_mean(input_tensor=inputs, axis=axes, keepdims=True) - inputs = tf.identity(inputs, 'final_reduce_mean') - - inputs = tf.squeeze(inputs, axes) - inputs = tf.compat.v1.layers.dense(inputs=inputs, units=self.num_classes) - inputs = tf.identity(inputs, 'final_dense') - return inputs diff --git a/official/r1/resnet/resnet_run_loop.py b/official/r1/resnet/resnet_run_loop.py deleted file mode 100644 index 5c90d855d..000000000 --- a/official/r1/resnet/resnet_run_loop.py +++ /dev/null @@ -1,831 +0,0 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Contains utility and supporting functions for ResNet. - - This module contains ResNet code which does not directly build layers. This -includes dataset management, hyperparameter and optimizer code, and argument -parsing. Code for defining the ResNet layers can be found in resnet_model.py. -""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import functools -import math -import multiprocessing -import os - -from absl import flags -from absl import logging -import tensorflow as tf - -from official.r1.resnet import imagenet_preprocessing -from official.r1.resnet import resnet_model -from official.r1.utils import export -from official.r1.utils.logs import hooks_helper -from official.r1.utils.logs import logger -from official.utils.flags import core as flags_core -from official.utils.misc import distribution_utils -from official.utils.misc import model_helpers - - -################################################################################ -# Functions for input processing. -################################################################################ -def process_record_dataset(dataset, - is_training, - batch_size, - shuffle_buffer, - parse_record_fn, - num_epochs=1, - dtype=tf.float32, - datasets_num_private_threads=None, - drop_remainder=False, - tf_data_experimental_slack=False): - """Given a Dataset with raw records, return an iterator over the records. - - Args: - dataset: A Dataset representing raw records - is_training: A boolean denoting whether the input is for training. - batch_size: The number of samples per batch. - shuffle_buffer: The buffer size to use when shuffling records. A larger - value results in better randomness, but smaller values reduce startup - time and use less memory. - parse_record_fn: A function that takes a raw record and returns the - corresponding (image, label) pair. - num_epochs: The number of epochs to repeat the dataset. - dtype: Data type to use for images/features. - datasets_num_private_threads: Number of threads for a private - threadpool created for all datasets computation. - drop_remainder: A boolean indicates whether to drop the remainder of the - batches. If True, the batch dimension will be static. 
- tf_data_experimental_slack: Whether to enable tf.data's - `experimental_slack` option. - - Returns: - Dataset of (image, label) pairs ready for iteration. - """ - # Defines a specific size thread pool for tf.data operations. - if datasets_num_private_threads: - options = tf.data.Options() - options.experimental_threading.private_threadpool_size = ( - datasets_num_private_threads) - dataset = dataset.with_options(options) - logging.info('datasets_num_private_threads: %s', - datasets_num_private_threads) - - # Disable intra-op parallelism to optimize for throughput instead of latency. - options = tf.data.Options() - options.experimental_threading.max_intra_op_parallelism = 1 - dataset = dataset.with_options(options) - - # Prefetches a batch at a time to smooth out the time taken to load input - # files for shuffling and processing. - dataset = dataset.prefetch(buffer_size=batch_size) - if is_training: - # Shuffles records before repeating to respect epoch boundaries. - dataset = dataset.shuffle(buffer_size=shuffle_buffer) - - # Repeats the dataset for the number of epochs to train. - dataset = dataset.repeat(num_epochs) - - # Parses the raw records into images and labels. - dataset = dataset.map( - lambda value: parse_record_fn(value, is_training, dtype), - num_parallel_calls=tf.data.experimental.AUTOTUNE) - dataset = dataset.batch(batch_size, drop_remainder=drop_remainder) - - # Operations between the final prefetch and the get_next call to the iterator - # will happen synchronously during run time. We prefetch here again to - # background all of the above processing work and keep it out of the - # critical training path. Setting buffer_size to tf.data.experimental.AUTOTUNE - # allows DistributionStrategies to adjust how many batches to fetch based - # on how many devices are present. - dataset = dataset.prefetch(buffer_size=tf.data.experimental.AUTOTUNE) - - if tf_data_experimental_slack: - options = tf.data.Options() - options.experimental_slack = True - dataset = dataset.with_options(options) - - return dataset - - -def get_synth_input_fn(height, width, num_channels, num_classes, - dtype=tf.float32): - """Returns an input function that returns a dataset with random data. - - This input_fn returns a data set that iterates over a set of random data and - bypasses all preprocessing, e.g. jpeg decode and copy. The host to device - copy is still included. This used to find the upper throughput bound when - tunning the full input pipeline. - - Args: - height: Integer height that will be used to create a fake image tensor. - width: Integer width that will be used to create a fake image tensor. - num_channels: Integer depth that will be used to create a fake image tensor. - num_classes: Number of classes that should be represented in the fake labels - tensor - dtype: Data type for features/images. - - Returns: - An input_fn that can be used in place of a real one to return a dataset - that can be used for iteration. - """ - # pylint: disable=unused-argument - def input_fn(is_training, data_dir, batch_size, *args, **kwargs): - """Returns dataset filled with random data.""" - # Synthetic input should be within [0, 255]. 
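[Editorial aside: a condensed sketch of the transformation order that `process_record_dataset` above establishes (prefetch, shuffle, repeat, map, batch, prefetch), with toy integers standing in for parsed records; shuffling before `repeat` keeps epoch boundaries intact. Assumes TF 2.x eager execution:]

```
import tensorflow as tf

ds = tf.data.Dataset.range(100)
ds = ds.prefetch(buffer_size=8)
ds = ds.shuffle(buffer_size=100)        # shuffle before repeat: epoch boundaries
ds = ds.repeat(2)
ds = ds.map(lambda x: x * 2,
            num_parallel_calls=tf.data.experimental.AUTOTUNE)
ds = ds.batch(8, drop_remainder=False)
ds = ds.prefetch(buffer_size=tf.data.experimental.AUTOTUNE)
print(next(iter(ds)).numpy())
```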
- inputs = tf.random.truncated_normal( - [batch_size] + [height, width, num_channels], - dtype=dtype, - mean=127, - stddev=60, - name='synthetic_inputs') - - labels = tf.random.uniform( - [batch_size], - minval=0, - maxval=num_classes - 1, - dtype=tf.int32, - name='synthetic_labels') - data = tf.data.Dataset.from_tensors((inputs, labels)).repeat() - data = data.prefetch(buffer_size=tf.data.experimental.AUTOTUNE) - return data - - return input_fn - - -def image_bytes_serving_input_fn(image_shape, dtype=tf.float32): - """Serving input fn for raw jpeg images.""" - - def _preprocess_image(image_bytes): - """Preprocess a single raw image.""" - # Bounding box around the whole image. - bbox = tf.constant([0.0, 0.0, 1.0, 1.0], dtype=dtype, shape=[1, 1, 4]) - height, width, num_channels = image_shape - image = imagenet_preprocessing.preprocess_image( - image_bytes, bbox, height, width, num_channels, is_training=False) - return image - - image_bytes_list = tf.compat.v1.placeholder( - shape=[None], dtype=tf.string, name='input_tensor') - images = tf.map_fn( - _preprocess_image, image_bytes_list, back_prop=False, dtype=dtype) - return tf.estimator.export.TensorServingInputReceiver( - images, {'image_bytes': image_bytes_list}) - - -def override_flags_and_set_envars_for_gpu_thread_pool(flags_obj): - """Override flags and set env_vars for performance. - - These settings exist to test the difference between using stock settings - and manual tuning. It also shows some of the ENV_VARS that can be tweaked to - squeeze a few extra examples per second. These settings are defaulted to the - current platform of interest, which changes over time. - - On systems with small numbers of cpu cores, e.g. under 8 logical cores, - setting up a gpu thread pool with `tf_gpu_thread_mode=gpu_private` may perform - poorly. - - Args: - flags_obj: Current flags, which will be adjusted possibly overriding - what has been set by the user on the command-line. - """ - cpu_count = multiprocessing.cpu_count() - logging.info('Logical CPU cores: %s', cpu_count) - - # Sets up thread pool for each GPU for op scheduling. - per_gpu_thread_count = 1 - total_gpu_thread_count = per_gpu_thread_count * flags_obj.num_gpus - os.environ['TF_GPU_THREAD_MODE'] = flags_obj.tf_gpu_thread_mode - os.environ['TF_GPU_THREAD_COUNT'] = str(per_gpu_thread_count) - logging.info('TF_GPU_THREAD_COUNT: %s', os.environ['TF_GPU_THREAD_COUNT']) - logging.info('TF_GPU_THREAD_MODE: %s', os.environ['TF_GPU_THREAD_MODE']) - - # Reduces general thread pool by number of threads used for GPU pool. - main_thread_count = cpu_count - total_gpu_thread_count - flags_obj.inter_op_parallelism_threads = main_thread_count - - # Sets thread count for tf.data. Logical cores minus threads assign to the - # private GPU pool along with 2 thread per GPU for event monitoring and - # sending / receiving tensors. - num_monitoring_threads = 2 * flags_obj.num_gpus - flags_obj.datasets_num_private_threads = (cpu_count - total_gpu_thread_count - - num_monitoring_threads) - - -################################################################################ -# Functions for running training/eval/validation loops for the model. -################################################################################ -def learning_rate_with_decay( - batch_size, batch_denom, num_images, boundary_epochs, decay_rates, - base_lr=0.1, warmup=False): - """Get a learning rate that decays step-wise as training progresses. - - Args: - batch_size: the number of examples processed in each training batch. 
- batch_denom: this value will be used to scale the base learning rate. - `0.1 * batch size` is divided by this number, such that when - batch_denom == batch_size, the initial learning rate will be 0.1. - num_images: total number of images that will be used for training. - boundary_epochs: list of ints representing the epochs at which we - decay the learning rate. - decay_rates: list of floats representing the decay rates to be used - for scaling the learning rate. It should have one more element - than `boundary_epochs`, and all elements should have the same type. - base_lr: Initial learning rate scaled based on batch_denom. - warmup: Run a 5 epoch warmup to the initial lr. - Returns: - Returns a function that takes a single argument - the number of batches - trained so far (global_step)- and returns the learning rate to be used - for training the next batch. - """ - initial_learning_rate = base_lr * batch_size / batch_denom - batches_per_epoch = num_images / batch_size - - # Reduce the learning rate at certain epochs. - # CIFAR-10: divide by 10 at epoch 100, 150, and 200 - # ImageNet: divide by 10 at epoch 30, 60, 80, and 90 - boundaries = [int(batches_per_epoch * epoch) for epoch in boundary_epochs] - vals = [initial_learning_rate * decay for decay in decay_rates] - - def learning_rate_fn(global_step): - """Builds scaled learning rate function with 5 epoch warm up.""" - lr = tf.compat.v1.train.piecewise_constant(global_step, boundaries, vals) - if warmup: - warmup_steps = int(batches_per_epoch * 5) - warmup_lr = ( - initial_learning_rate * tf.cast(global_step, tf.float32) / tf.cast( - warmup_steps, tf.float32)) - return tf.cond(pred=global_step < warmup_steps, - true_fn=lambda: warmup_lr, - false_fn=lambda: lr) - return lr - - def poly_rate_fn(global_step): - """Handles linear scaling rule, gradual warmup, and LR decay. - - The learning rate starts at 0, then it increases linearly per step. After - FLAGS.poly_warmup_epochs, we reach the base learning rate (scaled to account - for batch size). The learning rate is then decayed using a polynomial rate - decay schedule with power 2.0. - - Args: - global_step: the current global_step - - Returns: - returns the current learning rate - """ - - # Learning rate schedule for LARS polynomial schedule - if flags.FLAGS.batch_size < 8192: - plr = 5.0 - w_epochs = 5 - elif flags.FLAGS.batch_size < 16384: - plr = 10.0 - w_epochs = 5 - elif flags.FLAGS.batch_size < 32768: - plr = 25.0 - w_epochs = 5 - else: - plr = 32.0 - w_epochs = 14 - - w_steps = int(w_epochs * batches_per_epoch) - wrate = (plr * tf.cast(global_step, tf.float32) / tf.cast( - w_steps, tf.float32)) - - # TODO(pkanwar): use a flag to help calc num_epochs. - num_epochs = 90 - train_steps = batches_per_epoch * num_epochs - - min_step = tf.constant(1, dtype=tf.int64) - decay_steps = tf.maximum(min_step, tf.subtract(global_step, w_steps)) - poly_rate = tf.train.polynomial_decay( - plr, - decay_steps, - train_steps - w_steps + 1, - power=2.0) - return tf.where(global_step <= w_steps, wrate, poly_rate) - - # For LARS we have a new learning rate schedule - if flags.FLAGS.enable_lars: - return poly_rate_fn - - return learning_rate_fn - - -def per_replica_batch_size(batch_size, num_gpus): - """For multi-gpu, batch-size must be a multiple of the number of GPUs. - - - Note that distribution strategy handles this automatically when used with - Keras. For using with Estimator, we need to get per GPU batch. - - Args: - batch_size: Global batch size to be divided among devices. 
This should be - equal to num_gpus times the single-GPU batch_size for multi-gpu training. - num_gpus: How many GPUs are used with DistributionStrategies. - - Returns: - Batch size per device. - - Raises: - ValueError: if batch_size is not divisible by number of devices - """ - if num_gpus <= 1: - return batch_size - - remainder = batch_size % num_gpus - if remainder: - err = ('When running with multiple GPUs, batch size ' - 'must be a multiple of the number of available GPUs. Found {} ' - 'GPUs with a batch size of {}; try --batch_size={} instead.' - ).format(num_gpus, batch_size, batch_size - remainder) - raise ValueError(err) - return int(batch_size / num_gpus) - - -def resnet_model_fn(features, labels, mode, model_class, - resnet_size, weight_decay, learning_rate_fn, momentum, - data_format, resnet_version, loss_scale, - loss_filter_fn=None, dtype=resnet_model.DEFAULT_DTYPE, - fine_tune=False, label_smoothing=0.0): - """Shared functionality for different resnet model_fns. - - Initializes the ResnetModel representing the model layers - and uses that model to build the necessary EstimatorSpecs for - the `mode` in question. For training, this means building losses, - the optimizer, and the train op that get passed into the EstimatorSpec. - For evaluation and prediction, the EstimatorSpec is returned without - a train op, but with the necessary parameters for the given mode. - - Args: - features: tensor representing input images - labels: tensor representing class labels for all input images - mode: current estimator mode; should be one of - `tf.estimator.ModeKeys.TRAIN`, `EVALUATE`, `PREDICT` - model_class: a class representing a TensorFlow model that has a __call__ - function. We assume here that this is a subclass of ResnetModel. - resnet_size: A single integer for the size of the ResNet model. - weight_decay: weight decay loss rate used to regularize learned variables. - learning_rate_fn: function that returns the current learning rate given - the current global_step - momentum: momentum term used for optimization - data_format: Input format ('channels_last', 'channels_first', or None). - If set to None, the format is dependent on whether a GPU is available. - resnet_version: Integer representing which version of the ResNet network to - use. See README for details. Valid values: [1, 2] - loss_scale: The factor to scale the loss for numerical stability. A detailed - summary is present in the arg parser help text. - loss_filter_fn: function that takes a string variable name and returns - True if the var should be included in loss calculation, and False - otherwise. If None, batch_normalization variables will be excluded - from the loss. - dtype: the TensorFlow dtype to use for calculations. - fine_tune: If True only train the dense layers(final layers). - label_smoothing: If greater than 0 then smooth the labels. - - Returns: - EstimatorSpec parameterized according to the input params and the - current mode. - """ - - # Generate a summary node for the images - tf.compat.v1.summary.image('images', features, max_outputs=6) - # Checks that features/images have same data type being used for calculations. - assert features.dtype == dtype - - model = model_class(resnet_size, data_format, resnet_version=resnet_version, - dtype=dtype) - - logits = model(features, mode == tf.estimator.ModeKeys.TRAIN) - - # This acts as a no-op if the logits are already in fp32 (provided logits are - # not a SparseTensor). If dtype is is low precision, logits must be cast to - # fp32 for numerical stability. 
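  # Softmax cross-entropy is numerically fragile in float16/bfloat16 (large
  # logits can overflow and small probabilities can underflow), so the loss
  # below is always computed from float32 logits.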
- logits = tf.cast(logits, tf.float32) - - predictions = { - 'classes': tf.argmax(input=logits, axis=1), - 'probabilities': tf.nn.softmax(logits, name='softmax_tensor') - } - - if mode == tf.estimator.ModeKeys.PREDICT: - # Return the predictions and the specification for serving a SavedModel - return tf.estimator.EstimatorSpec( - mode=mode, - predictions=predictions, - export_outputs={ - 'predict': tf.estimator.export.PredictOutput(predictions) - }) - - # Calculate loss, which includes softmax cross entropy and L2 regularization. - if label_smoothing != 0.0: - one_hot_labels = tf.one_hot(labels, 1001) - cross_entropy = tf.losses.softmax_cross_entropy( - logits=logits, onehot_labels=one_hot_labels, - label_smoothing=label_smoothing) - else: - cross_entropy = tf.compat.v1.losses.sparse_softmax_cross_entropy( - logits=logits, labels=labels) - - # Create a tensor named cross_entropy for logging purposes. - tf.identity(cross_entropy, name='cross_entropy') - tf.compat.v1.summary.scalar('cross_entropy', cross_entropy) - - # If no loss_filter_fn is passed, assume we want the default behavior, - # which is that batch_normalization variables are excluded from loss. - def exclude_batch_norm(name): - return 'batch_normalization' not in name - loss_filter_fn = loss_filter_fn or exclude_batch_norm - - # Add weight decay to the loss. - l2_loss = weight_decay * tf.add_n( - # loss is computed using fp32 for numerical stability. - [ - tf.nn.l2_loss(tf.cast(v, tf.float32)) - for v in tf.compat.v1.trainable_variables() - if loss_filter_fn(v.name) - ]) - tf.compat.v1.summary.scalar('l2_loss', l2_loss) - loss = cross_entropy + l2_loss - - if mode == tf.estimator.ModeKeys.TRAIN: - global_step = tf.compat.v1.train.get_or_create_global_step() - - learning_rate = learning_rate_fn(global_step) - - # Create a tensor named learning_rate for logging purposes - tf.identity(learning_rate, name='learning_rate') - tf.compat.v1.summary.scalar('learning_rate', learning_rate) - - if flags.FLAGS.enable_lars: - from tensorflow.contrib import opt as contrib_opt # pylint: disable=g-import-not-at-top - optimizer = contrib_opt.LARSOptimizer( - learning_rate, - momentum=momentum, - weight_decay=weight_decay, - skip_list=['batch_normalization', 'bias']) - else: - optimizer = tf.compat.v1.train.MomentumOptimizer( - learning_rate=learning_rate, - momentum=momentum - ) - - fp16_implementation = getattr(flags.FLAGS, 'fp16_implementation', None) - if fp16_implementation == 'graph_rewrite': - optimizer = ( - tf.compat.v1.train.experimental.enable_mixed_precision_graph_rewrite( - optimizer, loss_scale=loss_scale)) - - def _dense_grad_filter(gvs): - """Only apply gradient updates to the final layer. - - This function is used for fine tuning. - - Args: - gvs: list of tuples with gradients and variable info - Returns: - filtered gradients so that only the dense layer remains - """ - return [(g, v) for g, v in gvs if 'dense' in v.name] - - if loss_scale != 1 and fp16_implementation != 'graph_rewrite': - # When computing fp16 gradients, often intermediate tensor values are - # so small, they underflow to 0. To avoid this, we multiply the loss by - # loss_scale to make these tensor values loss_scale times bigger. - scaled_grad_vars = optimizer.compute_gradients(loss * loss_scale) - - if fine_tune: - scaled_grad_vars = _dense_grad_filter(scaled_grad_vars) - - # Once the gradient computation is complete we can scale the gradients - # back to the correct scale before passing them to the optimizer. 
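      # Gradients are linear in the loss, so grad(loss * loss_scale) equals
      # loss_scale * grad(loss). Dividing each gradient by loss_scale below
      # therefore recovers the true gradient; the scaling only enlarges the
      # intermediate values so they stay out of the fp16 underflow range.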
- unscaled_grad_vars = [(grad / loss_scale, var) - for grad, var in scaled_grad_vars] - minimize_op = optimizer.apply_gradients(unscaled_grad_vars, global_step) - else: - grad_vars = optimizer.compute_gradients(loss) - if fine_tune: - grad_vars = _dense_grad_filter(grad_vars) - minimize_op = optimizer.apply_gradients(grad_vars, global_step) - - update_ops = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.UPDATE_OPS) - train_op = tf.group(minimize_op, update_ops) - else: - train_op = None - - accuracy = tf.compat.v1.metrics.accuracy(labels, predictions['classes']) - accuracy_top_5 = tf.compat.v1.metrics.mean( - tf.nn.in_top_k(predictions=logits, targets=labels, k=5, name='top_5_op')) - metrics = {'accuracy': accuracy, - 'accuracy_top_5': accuracy_top_5} - - # Create a tensor named train_accuracy for logging purposes - tf.identity(accuracy[1], name='train_accuracy') - tf.identity(accuracy_top_5[1], name='train_accuracy_top_5') - tf.compat.v1.summary.scalar('train_accuracy', accuracy[1]) - tf.compat.v1.summary.scalar('train_accuracy_top_5', accuracy_top_5[1]) - - return tf.estimator.EstimatorSpec( - mode=mode, - predictions=predictions, - loss=loss, - train_op=train_op, - eval_metric_ops=metrics) - - -def resnet_main( - flags_obj, model_function, input_function, dataset_name, shape=None): - """Shared main loop for ResNet Models. - - Args: - flags_obj: An object containing parsed flags. See define_resnet_flags() - for details. - model_function: the function that instantiates the Model and builds the - ops for train/eval. This will be passed directly into the estimator. - input_function: the function that processes the dataset and returns a - dataset that the estimator can train on. This will be wrapped with - all the relevant flags for running and passed to estimator. - dataset_name: the name of the dataset for training and evaluation. This is - used for logging purpose. - shape: list of ints representing the shape of the images used for training. - This is only used if flags_obj.export_dir is passed. - - Returns: - Dict of results of the run. Contains the keys `eval_results` and - `train_hooks`. `eval_results` contains accuracy (top_1) and accuracy_top_5. - `train_hooks` is a list the instances of hooks used during training. - """ - - model_helpers.apply_clean(flags.FLAGS) - - # Ensures flag override logic is only executed if explicitly triggered. - if flags_obj.tf_gpu_thread_mode: - override_flags_and_set_envars_for_gpu_thread_pool(flags_obj) - - # Configures cluster spec for distribution strategy. - num_workers = distribution_utils.configure_cluster(flags_obj.worker_hosts, - flags_obj.task_index) - - # Creates session config. allow_soft_placement = True, is required for - # multi-GPU and is not harmful for other modes. - session_config = tf.compat.v1.ConfigProto( - inter_op_parallelism_threads=flags_obj.inter_op_parallelism_threads, - intra_op_parallelism_threads=flags_obj.intra_op_parallelism_threads, - allow_soft_placement=True) - - distribution_strategy = distribution_utils.get_distribution_strategy( - distribution_strategy=flags_obj.distribution_strategy, - num_gpus=flags_core.get_num_gpus(flags_obj), - all_reduce_alg=flags_obj.all_reduce_alg, - num_packs=flags_obj.num_packs) - - # Creates a `RunConfig` that checkpoints every 24 hours which essentially - # results in checkpoints determined only by `epochs_between_evals`. 
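  # The Estimator also writes a checkpoint whenever a classifier.train() call
  # returns, so with time-based saving pushed out to a full day, evaluation
  # effectively always runs against the weights from the train call that just
  # finished.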
- run_config = tf.estimator.RunConfig( - train_distribute=distribution_strategy, - session_config=session_config, - save_checkpoints_secs=60*60*24, - save_checkpoints_steps=None) - - # Initializes model with all but the dense layer from pretrained ResNet. - if flags_obj.pretrained_model_checkpoint_path is not None: - warm_start_settings = tf.estimator.WarmStartSettings( - flags_obj.pretrained_model_checkpoint_path, - vars_to_warm_start='^(?!.*dense)') - else: - warm_start_settings = None - - classifier = tf.estimator.Estimator( - model_fn=model_function, model_dir=flags_obj.model_dir, config=run_config, - warm_start_from=warm_start_settings, params={ - 'resnet_size': int(flags_obj.resnet_size), - 'data_format': flags_obj.data_format, - 'batch_size': flags_obj.batch_size, - 'resnet_version': int(flags_obj.resnet_version), - 'loss_scale': flags_core.get_loss_scale(flags_obj, - default_for_fp16=128), - 'dtype': flags_core.get_tf_dtype(flags_obj), - 'fine_tune': flags_obj.fine_tune, - 'num_workers': num_workers, - }) - - run_params = { - 'batch_size': flags_obj.batch_size, - 'dtype': flags_core.get_tf_dtype(flags_obj), - 'resnet_size': flags_obj.resnet_size, - 'resnet_version': flags_obj.resnet_version, - 'synthetic_data': flags_obj.use_synthetic_data, - 'train_epochs': flags_obj.train_epochs, - 'num_workers': num_workers, - } - if flags_obj.use_synthetic_data: - dataset_name = dataset_name + '-synthetic' - - benchmark_logger = logger.get_benchmark_logger() - benchmark_logger.log_run_info('resnet', dataset_name, run_params, - test_id=flags_obj.benchmark_test_id) - - train_hooks = hooks_helper.get_train_hooks( - flags_obj.hooks, - model_dir=flags_obj.model_dir, - batch_size=flags_obj.batch_size) - - def input_fn_train(num_epochs, input_context=None): - return input_function( - is_training=True, - data_dir=flags_obj.data_dir, - batch_size=per_replica_batch_size( - flags_obj.batch_size, flags_core.get_num_gpus(flags_obj)), - num_epochs=num_epochs, - dtype=flags_core.get_tf_dtype(flags_obj), - datasets_num_private_threads=flags_obj.datasets_num_private_threads, - input_context=input_context) - - def input_fn_eval(): - return input_function( - is_training=False, - data_dir=flags_obj.data_dir, - batch_size=per_replica_batch_size( - flags_obj.batch_size, flags_core.get_num_gpus(flags_obj)), - num_epochs=1, - dtype=flags_core.get_tf_dtype(flags_obj)) - - train_epochs = (0 if flags_obj.eval_only or not flags_obj.train_epochs else - flags_obj.train_epochs) - - use_train_and_evaluate = flags_obj.use_train_and_evaluate or num_workers > 1 - if use_train_and_evaluate: - train_spec = tf.estimator.TrainSpec( - input_fn=lambda input_context=None: input_fn_train( - train_epochs, input_context=input_context), - hooks=train_hooks, - max_steps=flags_obj.max_train_steps) - eval_spec = tf.estimator.EvalSpec(input_fn=input_fn_eval) - logging.info('Starting to train and evaluate.') - tf.estimator.train_and_evaluate(classifier, train_spec, eval_spec) - # tf.estimator.train_and_evalute doesn't return anything in multi-worker - # case. - eval_results = {} - else: - if train_epochs == 0: - # If --eval_only is set, perform a single loop with zero train epochs. - schedule, n_loops = [0], 1 - else: - # Compute the number of times to loop while training. All but the last - # pass will train for `epochs_between_evals` epochs, while the last will - # train for the number needed to reach `training_epochs`. For instance if - # train_epochs = 25 and epochs_between_evals = 10 - # schedule will be set to [10, 10, 5]. 
That is to say, the loop will: - # Train for 10 epochs and then evaluate. - # Train for another 10 epochs and then evaluate. - # Train for a final 5 epochs (to reach 25 epochs) and then evaluate. - n_loops = math.ceil(train_epochs / flags_obj.epochs_between_evals) - schedule = [flags_obj.epochs_between_evals for _ in range(int(n_loops))] - schedule[-1] = train_epochs - sum(schedule[:-1]) # over counting. - - for cycle_index, num_train_epochs in enumerate(schedule): - logging.info('Starting cycle: %d/%d', cycle_index, int(n_loops)) - - if num_train_epochs: - # Since we are calling classifier.train immediately in each loop, the - # value of num_train_epochs in the lambda function will not be changed - # before it is used. So it is safe to ignore the pylint error here - # pylint: disable=cell-var-from-loop - classifier.train( - input_fn=lambda input_context=None: input_fn_train( - num_train_epochs, input_context=input_context), - hooks=train_hooks, - max_steps=flags_obj.max_train_steps) - - # flags_obj.max_train_steps is generally associated with testing and - # profiling. As a result it is frequently called with synthetic data, - # which will iterate forever. Passing steps=flags_obj.max_train_steps - # allows the eval (which is generally unimportant in those circumstances) - # to terminate. Note that eval will run for max_train_steps each loop, - # regardless of the global_step count. - logging.info('Starting to evaluate.') - eval_results = classifier.evaluate(input_fn=input_fn_eval, - steps=flags_obj.max_train_steps) - - benchmark_logger.log_evaluation_result(eval_results) - - if model_helpers.past_stop_threshold( - flags_obj.stop_threshold, eval_results['accuracy']): - break - - if flags_obj.export_dir is not None: - # Exports a saved model for the given classifier. - export_dtype = flags_core.get_tf_dtype(flags_obj) - if flags_obj.image_bytes_as_serving_input: - input_receiver_fn = functools.partial( - image_bytes_serving_input_fn, shape, dtype=export_dtype) - else: - input_receiver_fn = export.build_tensor_serving_input_receiver_fn( - shape, batch_size=flags_obj.batch_size, dtype=export_dtype) - classifier.export_savedmodel(flags_obj.export_dir, input_receiver_fn, - strip_default_attrs=True) - - stats = {} - stats['eval_results'] = eval_results - stats['train_hooks'] = train_hooks - - return stats - - -def define_resnet_flags(resnet_size_choices=None, dynamic_loss_scale=False, - fp16_implementation=False): - """Add flags and validators for ResNet.""" - flags_core.define_base(clean=True, train_epochs=True, - epochs_between_evals=True, stop_threshold=True, - num_gpu=True, hooks=True, export_dir=True, - distribution_strategy=True) - flags_core.define_performance(num_parallel_calls=False, - inter_op=True, - intra_op=True, - synthetic_data=True, - dtype=True, - all_reduce_alg=True, - num_packs=True, - tf_gpu_thread_mode=True, - datasets_num_private_threads=True, - dynamic_loss_scale=dynamic_loss_scale, - fp16_implementation=fp16_implementation, - loss_scale=True, - tf_data_experimental_slack=True, - max_train_steps=True) - flags_core.define_image() - flags_core.define_benchmark() - flags_core.define_distribution() - flags.adopt_module_key_flags(flags_core) - - flags.DEFINE_enum( - name='resnet_version', short_name='rv', default='1', - enum_values=['1', '2'], - help=flags_core.help_wrap( - 'Version of ResNet. 
(1 or 2) See README.md for details.')) - flags.DEFINE_bool( - name='fine_tune', short_name='ft', default=False, - help=flags_core.help_wrap( - 'If True do not train any parameters except for the final layer.')) - flags.DEFINE_string( - name='pretrained_model_checkpoint_path', short_name='pmcp', default=None, - help=flags_core.help_wrap( - 'If not None initialize all the network except the final layer with ' - 'these values')) - flags.DEFINE_boolean( - name='eval_only', default=False, - help=flags_core.help_wrap('Skip training and only perform evaluation on ' - 'the latest checkpoint.')) - flags.DEFINE_boolean( - name='image_bytes_as_serving_input', default=False, - help=flags_core.help_wrap( - 'If True exports savedmodel with serving signature that accepts ' - 'JPEG image bytes instead of a fixed size [HxWxC] tensor that ' - 'represents the image. The former is easier to use for serving at ' - 'the expense of image resize/cropping being done as part of model ' - 'inference. Note, this flag only applies to ImageNet and cannot ' - 'be used for CIFAR.')) - flags.DEFINE_boolean( - name='use_train_and_evaluate', default=False, - help=flags_core.help_wrap( - 'If True, uses `tf.estimator.train_and_evaluate` for the training ' - 'and evaluation loop, instead of separate calls to `classifier.train ' - 'and `classifier.evaluate`, which is the default behavior.')) - flags.DEFINE_bool( - name='enable_lars', default=False, - help=flags_core.help_wrap( - 'Enable LARS optimizer for large batch training.')) - flags.DEFINE_float( - name='label_smoothing', default=0.0, - help=flags_core.help_wrap( - 'Label smoothing parameter used in the softmax_cross_entropy')) - flags.DEFINE_float( - name='weight_decay', default=1e-4, - help=flags_core.help_wrap( - 'Weight decay coefficiant for l2 regularization.')) - - choice_kwargs = dict( - name='resnet_size', short_name='rs', default='50', - help=flags_core.help_wrap('The size of the ResNet model to use.')) - - if resnet_size_choices is None: - flags.DEFINE_string(**choice_kwargs) - else: - flags.DEFINE_enum(enum_values=resnet_size_choices, **choice_kwargs) diff --git a/official/r1/transformer/README.md b/official/r1/transformer/README.md deleted file mode 100644 index c680f8b33..000000000 --- a/official/r1/transformer/README.md +++ /dev/null @@ -1,380 +0,0 @@ -![No Maintenance Intended](https://img.shields.io/badge/No%20Maintenance%20Intended-%E2%9C%95-red.svg) -![TensorFlow Requirement: 1.x](https://img.shields.io/badge/TensorFlow%20Requirement-1.x-brightgreen) -![TensorFlow 2 Not Supported](https://img.shields.io/badge/TensorFlow%202%20Not%20Supported-%E2%9C%95-red.svg) - -# Transformer Translation Model -This is an implementation of the Transformer translation model as described in the [Attention is All You Need](https://arxiv.org/abs/1706.03762) paper. Based on the code provided by the authors: [Transformer code](https://github.com/tensorflow/tensor2tensor/blob/master/tensor2tensor/models/transformer.py) from [Tensor2Tensor](https://github.com/tensorflow/tensor2tensor). Also, check out the [tutorial](https://www.tensorflow.org/beta/tutorials/text/transformer) on Transformer in TF 2.0. - -**Please follow the [README](https://github.com/tensorflow/models/official/transformer/README.md), the new Keras-based TF 2 implementation, to walk through the new Transformer.** - -Transformer is a neural network architecture that solves sequence to sequence problems using attention mechanisms. 
Unlike traditional neural seq2seq models, Transformer does not involve recurrent connections. The attention mechanism learns dependencies between tokens in two sequences. Since attention weights apply to all tokens in the sequences, the Transformer model is able to easily capture long-distance dependencies. - -Transformer's overall structure follows the standard encoder-decoder pattern. The encoder uses self-attention to compute a representation of the input sequence. The decoder generates the output sequence one token at a time, taking the encoder output and previous decoder-outputted tokens as inputs. - -The model also applies embeddings on the input and output tokens, and adds a constant positional encoding. The positional encoding adds information about the position of each token. - -## Contents - * [Contents](#contents) - * [Walkthrough](#walkthrough) - * [Benchmarks](#benchmarks) - * [Training times](#training-times) - * [Evaluation results](#evaluation-results) - * [Detailed instructions](#detailed-instructions) - * [Environment preparation](#environment-preparation) - * [Download and preprocess datasets](#download-and-preprocess-datasets) - * [Model training and evaluation](#model-training-and-evaluation) - * [Translate using the model](#translate-using-the-model) - * [Compute official BLEU score](#compute-official-bleu-score) - * [TPU](#tpu) - * [Export trained model](#export-trained-model) - * [Example translation](#example-translation) - * [Implementation overview](#implementation-overview) - * [Model Definition](#model-definition) - * [Model Estimator](#model-estimator) - * [Other scripts](#other-scripts) - * [Test dataset](#test-dataset) - * [Term definitions](#term-definitions) - -## Walkthrough - -Below are the commands for running the Transformer model. See the -[Detailed instructions](#detailed-instructions) for more details on running the -model. - -``` -cd /path/to/models/official/transformer - -# Ensure that PYTHONPATH is correctly defined as described in -# https://github.com/tensorflow/models/tree/master/official#requirements -# export PYTHONPATH="$PYTHONPATH:/path/to/models" - -# Export variables -PARAM_SET=big -DATA_DIR=$HOME/transformer/data -MODEL_DIR=$HOME/transformer/model_$PARAM_SET -VOCAB_FILE=$DATA_DIR/vocab.ende.32768 - -# Download training/evaluation/test datasets -python data_download.py --data_dir=$DATA_DIR - -# Train the model for 10 epochs, and evaluate after every epoch. -python transformer_main.py --data_dir=$DATA_DIR --model_dir=$MODEL_DIR \ - --vocab_file=$VOCAB_FILE --param_set=$PARAM_SET \ - --bleu_source=$DATA_DIR/newstest2014.en --bleu_ref=$DATA_DIR/newstest2014.de - -# Run during training in a separate process to get continuous updates, -# or after training is complete. -tensorboard --logdir=$MODEL_DIR - -# Translate some text using the trained model -python translate.py --model_dir=$MODEL_DIR --vocab_file=$VOCAB_FILE \ - --param_set=$PARAM_SET --text="hello world" - -# Compute model's BLEU score using the newstest2014 dataset. -python translate.py --model_dir=$MODEL_DIR --vocab_file=$VOCAB_FILE \ - --param_set=$PARAM_SET --file=$DATA_DIR/newstest2014.en --file_out=translation.en -python compute_bleu.py --translation=translation.en --reference=$DATA_DIR/newstest2014.de -``` - -## Benchmarks -### Training times - -Currently, both big and base parameter sets run on a single GPU. The measurements below -are reported from running the model on a P100 GPU. 
- -Param Set | batches/sec | batches per epoch | time per epoch ---- | --- | --- | --- -base | 4.8 | 83244 | 4 hr -big | 1.1 | 41365 | 10 hr - -### Evaluation results -Below are the case-insensitive BLEU scores after 10 epochs. - -Param Set | Score ---- | --- | -base | 27.7 -big | 28.9 - - -## Detailed instructions - - -0. ### Environment preparation - - #### Add models repo to PYTHONPATH - Follow the instructions described in the [Requirements](https://github.com/tensorflow/models/tree/master/official#requirements) section to add the models folder to the python path. - - #### Export variables (optional) - - Export the following variables, or modify the values in each of the snippets below: - ``` - PARAM_SET=big - DATA_DIR=$HOME/transformer/data - MODEL_DIR=$HOME/transformer/model_$PARAM_SET - VOCAB_FILE=$DATA_DIR/vocab.ende.32768 - ``` - -1. ### Download and preprocess datasets - - [data_download.py](data_download.py) downloads and preprocesses the training and evaluation WMT datasets. After the data is downloaded and extracted, the training data is used to generate a vocabulary of subtokens. The evaluation and training strings are tokenized, and the resulting data is sharded, shuffled, and saved as TFRecords. - - 1.75GB of compressed data will be downloaded. In total, the raw files (compressed, extracted, and combined files) take up 8.4GB of disk space. The resulting TFRecord and vocabulary files are 722MB. The script takes around 40 minutes to run, with the bulk of the time spent downloading and ~15 minutes spent on preprocessing. - - Command to run: - ``` - python data_download.py --data_dir=$DATA_DIR - ``` - - Arguments: - * `--data_dir`: Path where the preprocessed TFRecord data, and vocab file will be saved. - * Use the `--help` or `-h` flag to get a full list of possible arguments. - -2. ### Model training and evaluation - - [transformer_main.py](transformer_main.py) creates a Transformer model, and trains it using Tensorflow Estimator. - - Command to run: - ``` - python transformer_main.py --data_dir=$DATA_DIR --model_dir=$MODEL_DIR \ - --vocab_file=$VOCAB_FILE --param_set=$PARAM_SET - ``` - - Arguments: - * `--data_dir`: This should be set to the same directory given to the `data_download`'s `data_dir` argument. - * `--model_dir`: Directory to save Transformer model training checkpoints. - * `--vocab_file`: Path to subtoken vocabulary file. If data_download was used, you may find the file in `data_dir`. - * `--param_set`: Parameter set to use when creating and training the model. Options are `base` and `big` (default). - * Use the `--help` or `-h` flag to get a full list of possible arguments. - - #### Customizing training schedule - - By default, the model will train for 10 epochs, and evaluate after every epoch. The training schedule may be defined through the flags: - * Training with epochs (default): - * `--train_epochs`: The total number of complete passes to make through the dataset - * `--epochs_between_evals`: The number of epochs to train between evaluations. - * Training with steps: - * `--train_steps`: sets the total number of training steps to run. - * `--steps_between_evals`: Number of training steps to run between evaluations. - - Only one of `train_epochs` or `train_steps` may be set. Since the default option is to evaluate the model after training for an epoch, it may take 4 or more hours between model evaluations. To get more frequent evaluations, use the flags `--train_steps=250000 --steps_between_evals=1000`. 
- - Note: At the beginning of each training session, the training dataset is reloaded and shuffled. Stopping the training before completing an epoch may result in worse model quality, due to the chance that some examples may be seen more than others. Therefore, it is recommended to use epochs when the model quality is important. - - #### Compute BLEU score during model evaluation - - Use these flags to compute the BLEU when the model evaluates: - * `--bleu_source`: Path to file containing text to translate. - * `--bleu_ref`: Path to file containing the reference translation. - * `--stop_threshold`: Train until the BLEU score reaches this lower bound. This setting overrides the `--train_steps` and `--train_epochs` flags. - - When running `transformer_main.py`, use the flags: `--bleu_source=$DATA_DIR/newstest2014.en --bleu_ref=$DATA_DIR/newstest2014.de` - - #### Tensorboard - Training and evaluation metrics (loss, accuracy, approximate BLEU score, etc.) are logged, and can be displayed in the browser using Tensorboard. - ``` - tensorboard --logdir=$MODEL_DIR - ``` - The values are displayed at [localhost:6006](localhost:6006). - -3. ### Translate using the model - [translate.py](translate.py) contains the script to use the trained model to translate input text or file. Each line in the file is translated separately. - - Command to run: - ``` - python translate.py --model_dir=$MODEL_DIR --vocab_file=$VOCAB_FILE \ - --param_set=$PARAM_SET --text="hello world" - ``` - - Arguments for initializing the Subtokenizer and trained model: - * `--model_dir` and `--param_set`: These parameters are used to rebuild the trained model - * `--vocab_file`: Path to subtoken vocabulary file. If data_download was used, you may find the file in `data_dir`. - - Arguments for specifying what to translate: - * `--text`: Text to translate - * `--file`: Path to file containing text to translate - * `--file_out`: If `--file` is set, then this file will store the input file's translations. - - To translate the newstest2014 data, run: - ``` - python translate.py --model_dir=$MODEL_DIR --vocab_file=$VOCAB_FILE \ - --param_set=$PARAM_SET --file=$DATA_DIR/newstest2014.en --file_out=translation.en - ``` - - Translating the file takes around 15 minutes on a GTX1080, or 5 minutes on a P100. - -4. ### Compute official BLEU score - Use [compute_bleu.py](compute_bleu.py) to compute the BLEU by comparing generated translations to the reference translation. - - Command to run: - ``` - python compute_bleu.py --translation=translation.en --reference=$DATA_DIR/newstest2014.de - ``` - - Arguments: - * `--translation`: Path to file containing generated translations. - * `--reference`: Path to file containing reference translations. - * Use the `--help` or `-h` flag to get a full list of possible arguments. - -5. ### TPU - TPU support for this version of Transformer is experimental. Currently it is present for - demonstration purposes only, but will be optimized in the coming weeks. - -## Export trained model -To export the model as a Tensorflow [SavedModel](https://www.tensorflow.org/guide/saved_model) format, use the argument `--export_dir` when running `transformer_main.py`. A folder will be created in the directory with the name as the timestamp (e.g. $EXPORT_DIR/1526427396). 
- -``` -EXPORT_DIR=$HOME/transformer/saved_model -python transformer_main.py --data_dir=$DATA_DIR --model_dir=$MODEL_DIR \ - --vocab_file=$VOCAB_FILE --param_set=$PARAM_SET --export_model=$EXPORT_DIR -``` - -To inspect the SavedModel, use saved_model_cli: -``` -SAVED_MODEL_DIR=$EXPORT_DIR/{TIMESTAMP} # replace {TIMESTAMP} with the name of the folder created -saved_model_cli show --dir=$SAVED_MODEL_DIR --all -``` - -### Example translation -Let's translate **"hello world!"**, **"goodbye world."**, and **"Would you like some pie?"**. - -The SignatureDef for "translate" is: - - signature_def['translate']: - The given SavedModel SignatureDef contains the following input(s): - inputs['input'] tensor_info: - dtype: DT_INT64 - shape: (-1, -1) - name: Placeholder:0 - The given SavedModel SignatureDef contains the following output(s): - outputs['outputs'] tensor_info: - dtype: DT_INT32 - shape: (-1, -1) - name: model/Transformer/strided_slice_19:0 - outputs['scores'] tensor_info: - dtype: DT_FLOAT - shape: (-1) - name: model/Transformer/strided_slice_20:0 - -Follow the steps below to use the translate signature def: - -1. #### Encode the inputs to integer arrays. - This can be done using `utils.tokenizer.Subtokenizer`, and the vocab file in the SavedModel assets (`$SAVED_MODEL_DIR/assets.extra/vocab.txt`). - - ``` - from official.transformer.utils.tokenizer import Subtokenizer - s = Subtokenizer(PATH_TO_VOCAB_FILE) - print(s.encode("hello world!", add_eos=True)) - ``` - - The encoded inputs are: - * `"hello world!" = [6170, 3731, 178, 207, 1]` - * `"goodbye world." = [15431, 13966, 36, 178, 3, 1]` - * `"Would you like some pie?" = [9092, 72, 155, 202, 19851, 102, 1]` - -2. #### Run `saved_model_cli` to obtain the predicted translations - The encoded inputs should be padded so that they are the same length. The padding token is `0`. - ``` - ENCODED_INPUTS="[[26228, 145, 178, 1, 0, 0, 0], \ - [15431, 13966, 36, 178, 3, 1, 0], \ - [9092, 72, 155, 202, 19851, 102, 1]]" - ``` - - Now, use the `run` command with `saved_model_cli` to get the outputs. - - ``` - saved_model_cli run --dir=$SAVED_MODEL_DIR --tag_set=serve --signature_def=translate \ - --input_expr="input=$ENCODED_INPUTS" - ``` - - The outputs will look similar to: - ``` - Result for output key outputs: - [[18744 145 297 1 0 0 0 0 0 0 0 0 - 0 0] - [ 5450 4642 21 11 297 3 1 0 0 0 0 0 - 0 0] - [25940 22 66 103 21713 31 102 1 0 0 0 0 - 0 0]] - Result for output key scores: - [-1.5493642 -1.4032784 -3.252089 ] - ``` - -3. #### Decode the outputs to strings. - Use the `Subtokenizer` and vocab file as described in step 1 to decode the output integer arrays. - ``` - from official.transformer.utils.tokenizer import Subtokenizer - s = Subtokenizer(PATH_TO_VOCAB_FILE) - print(s.decode([18744, 145, 297, 1])) - ``` - The decoded outputs from above are: - * `[18744, 145, 297, 1] = "Hallo Welt"` - * `[5450, 4642, 21, 11, 297, 3, 1] = "Abschied von der Welt."` - * `[25940, 22, 66, 103, 21713, 31, 102, 1] = "Möchten Sie einen Kuchen?"` - -## Implementation overview - -A brief look at each component in the code: - -### Model Definition -The [model](model) subdirectory contains the implementation of the Transformer model. The following files define the Transformer model and its layers: -* [transformer.py](model/transformer.py): Defines the transformer model and its encoder/decoder layer stacks. -* [embedding_layer.py](model/embedding_layer.py): Contains the layer that calculates the embeddings. 
The embedding weights are also used to calculate the pre-softmax probabilities from the decoder output.
-* [attention_layer.py](model/attention_layer.py): Defines the multi-headed and self attention layers that are used in the encoder/decoder stacks.
-* [ffn_layer.py](model/ffn_layer.py): Defines the feedforward network that is used in the encoder/decoder stacks. The network is composed of 2 fully connected layers.
-
-Other files:
-* [beam_search.py](model/beam_search.py) contains the beam search implementation, which is used during model inference to find high scoring translations.
-* [model_params.py](model/model_params.py) contains the parameters used for the big and base models.
-* [model_utils.py](model/model_utils.py) defines some helper functions used in the model (calculating padding, bias, etc.).
-
-
-### Model Estimator
-[transformer_main.py](transformer_main.py) creates an `Estimator` to train and evaluate the model.
-
-Helper functions:
-* [utils/dataset.py](utils/dataset.py): contains functions for creating a `dataset` that is passed to the `Estimator`.
-* [utils/metrics.py](utils/metrics.py): defines metrics functions used by the `Estimator` to evaluate the model.
-
-### Other scripts
-
-Aside from the main file to train the Transformer model, we provide other scripts for using the model or downloading the data:
-
-#### Data download and preprocessing
-
-[data_download.py](data_download.py) downloads and extracts data, then uses `Subtokenizer` to tokenize strings into arrays of int IDs. The int arrays are converted to `tf.Examples` and saved in the `TFRecord` format.
-
- The data is downloaded from the Workshop of Machine Translation (WMT) [news translation task](http://www.statmt.org/wmt17/translation-task.html). The following datasets are used:
-
- * Europarl v7
- * Common Crawl corpus
- * News Commentary v12
-
- See the [download section](http://www.statmt.org/wmt17/translation-task.html#download) to explore the raw datasets. The parameters in this model are tuned to fit the English-German translation data, so the EN-DE texts are extracted from the downloaded compressed files.
-
-The text is transformed into arrays of integer IDs using the `Subtokenizer` defined in [`utils/tokenizer.py`](utils/tokenizer.py). During initialization of the `Subtokenizer`, the raw training data is used to generate a vocabulary list containing common subtokens.
-
-The target vocabulary size of the WMT dataset is 32,768. The set of subtokens is found through binary search on the minimum number of times a subtoken appears in the data. The actual vocabulary size is 33,708, and is stored in a 324kB file.
-
-#### Translation
-Translation is defined in [translate.py](translate.py). First, `Subtokenizer` tokenizes the input. The vocabulary file is the same used to tokenize the training/eval files. Next, beam search is used to find the combination of tokens that maximizes the probability outputted by the model decoder. The tokens are then converted back to strings with `Subtokenizer`.
-
-#### BLEU computation
-[compute_bleu.py](compute_bleu.py): Implementation from [https://github.com/tensorflow/tensor2tensor/blob/master/tensor2tensor/utils/bleu_hook.py](https://github.com/tensorflow/tensor2tensor/blob/master/tensor2tensor/utils/bleu_hook.py).
-
-### Test dataset
-The [newstest2014 files](https://storage.googleapis.com/tf-perf-public/official_transformer/test_data/newstest2014.tgz)
-are extracted from the [NMT Seq2Seq tutorial](https://google.github.io/seq2seq/nmt/#download-data).
-The raw text files are converted from the SGM format of the -[WMT 2016](http://www.statmt.org/wmt16/translation-task.html) test sets. The -newstest2014 files are put into the `$DATA_DIR` when executing -`data_download.py` - -## Term definitions - -**Steps / Epochs**: -* Step: unit for processing a single batch of data -* Epoch: a complete run through the dataset - -Example: Consider a training a dataset with 100 examples that is divided into 20 batches with 5 examples per batch. A single training step trains the model on one batch. After 20 training steps, the model will have trained on every batch in the dataset, or one epoch. - -**Subtoken**: Words are referred to as tokens, and parts of words are referred to as 'subtokens'. For example, the word 'inclined' may be split into `['incline', 'd_']`. The '\_' indicates the end of the token. The subtoken vocabulary list is guaranteed to contain the alphabet (including numbers and special characters), so all words can be tokenized. diff --git a/official/r1/transformer/__init__.py b/official/r1/transformer/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/official/r1/transformer/attention_layer.py b/official/r1/transformer/attention_layer.py deleted file mode 100644 index e3537939f..000000000 --- a/official/r1/transformer/attention_layer.py +++ /dev/null @@ -1,148 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Implementation of multiheaded attention and self-attention layers.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import tensorflow.compat.v1 as tf - - -class Attention(tf.layers.Layer): - """Multi-headed attention layer.""" - - def __init__(self, hidden_size, num_heads, attention_dropout, train): - if hidden_size % num_heads != 0: - raise ValueError("Hidden size must be evenly divisible by the number of " - "heads.") - - super(Attention, self).__init__() - self.hidden_size = hidden_size - self.num_heads = num_heads - self.attention_dropout = attention_dropout - self.train = train - - # Layers for linearly projecting the queries, keys, and values. - self.q_dense_layer = tf.layers.Dense(hidden_size, use_bias=False, name="q") - self.k_dense_layer = tf.layers.Dense(hidden_size, use_bias=False, name="k") - self.v_dense_layer = tf.layers.Dense(hidden_size, use_bias=False, name="v") - - self.output_dense_layer = tf.layers.Dense(hidden_size, use_bias=False, - name="output_transform") - - def split_heads(self, x): - """Split x into different heads, and transpose the resulting value. - - The tensor is transposed to insure the inner dimensions hold the correct - values during the matrix multiplication. 
- - Args: - x: A tensor with shape [batch_size, length, hidden_size] - - Returns: - A tensor with shape [batch_size, num_heads, length, hidden_size/num_heads] - """ - with tf.name_scope("split_heads"): - batch_size = tf.shape(x)[0] - length = tf.shape(x)[1] - - # Calculate depth of last dimension after it has been split. - depth = (self.hidden_size // self.num_heads) - - # Split the last dimension - x = tf.reshape(x, [batch_size, length, self.num_heads, depth]) - - # Transpose the result - return tf.transpose(x, [0, 2, 1, 3]) - - def combine_heads(self, x): - """Combine tensor that has been split. - - Args: - x: A tensor [batch_size, num_heads, length, hidden_size/num_heads] - - Returns: - A tensor with shape [batch_size, length, hidden_size] - """ - with tf.name_scope("combine_heads"): - batch_size = tf.shape(x)[0] - length = tf.shape(x)[2] - x = tf.transpose(x, [0, 2, 1, 3]) # --> [batch, length, num_heads, depth] - return tf.reshape(x, [batch_size, length, self.hidden_size]) - - def call(self, x, y, bias, cache=None): - """Apply attention mechanism to x and y. - - Args: - x: a tensor with shape [batch_size, length_x, hidden_size] - y: a tensor with shape [batch_size, length_y, hidden_size] - bias: attention bias that will be added to the result of the dot product. - cache: (Used during prediction) dictionary with tensors containing results - of previous attentions. The dictionary must have the items: - {"k": tensor with shape [batch_size, i, key_channels], - "v": tensor with shape [batch_size, i, value_channels]} - where i is the current decoded length. - - Returns: - Attention layer output with shape [batch_size, length_x, hidden_size] - """ - # Linearly project the query (q), key (k) and value (v) using different - # learned projections. This is in preparation of splitting them into - # multiple heads. Multi-head attention uses multiple queries, keys, and - # values rather than regular attention (which uses a single q, k, v). - q = self.q_dense_layer(x) - k = self.k_dense_layer(y) - v = self.v_dense_layer(y) - - if cache is not None: - # Combine cached keys and values with new keys and values. - k = tf.concat([cache["k"], k], axis=1) - v = tf.concat([cache["v"], v], axis=1) - - # Update cache - cache["k"] = k - cache["v"] = v - - # Split q, k, v into heads. - q = self.split_heads(q) - k = self.split_heads(k) - v = self.split_heads(v) - - # Scale q to prevent the dot product between q and k from growing too large. - depth = (self.hidden_size // self.num_heads) - q *= depth ** -0.5 - - # Calculate dot product attention - logits = tf.matmul(q, k, transpose_b=True) - logits += bias - weights = tf.nn.softmax(logits, name="attention_weights") - if self.train: - weights = tf.nn.dropout(weights, 1.0 - self.attention_dropout) - attention_output = tf.matmul(weights, v) - - # Recombine heads --> [batch_size, length, hidden_size] - attention_output = self.combine_heads(attention_output) - - # Run the combined outputs through another linear projection layer. - attention_output = self.output_dense_layer(attention_output) - return attention_output - - -class SelfAttention(Attention): - """Multiheaded self-attention layer.""" - - def call(self, x, bias, cache=None): - return super(SelfAttention, self).call(x, x, bias, cache) diff --git a/official/r1/transformer/dataset.py b/official/r1/transformer/dataset.py deleted file mode 100644 index 47987745c..000000000 --- a/official/r1/transformer/dataset.py +++ /dev/null @@ -1,284 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Input pipeline for the transformer model to read, filter, and batch examples. - -Two things to note in the pipeline: - -1. Batching scheme - - The examples encoded in the TFRecord files contain data in the format: - {"inputs": [variable length array of integers], - "targets": [variable length array of integers]} - Where integers in the arrays refer to tokens in the English and German vocab - file (named `vocab.ende.32768`). - - Prior to batching, elements in the dataset are grouped by length (max between - "inputs" and "targets" length). Each group is then batched such that: - group_batch_size * length <= batch_size. - - Another way to view batch_size is the maximum number of tokens in each batch. - - Once batched, each element in the dataset will have the shape: - {"inputs": [group_batch_size, padded_input_length], - "targets": [group_batch_size, padded_target_length]} - Lengths are padded to the longest "inputs" or "targets" sequence in the batch - (padded_input_length and padded_target_length can be different). - - This batching scheme decreases the fraction of padding tokens per training - batch, thus improving the training speed significantly. - -2. Shuffling - - While training, the dataset is shuffled in two places in the code. The first - is the list of training files. Second, while reading records using - `parallel_interleave`, the `sloppy` argument is used to generate randomness - in the order of the examples. -""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import math -import os - -import tensorflow.compat.v1 as tf - -from official.utils.misc import model_helpers - -# Buffer size for reading records from a TFRecord file. Each training file is -# 7.2 MB, so 8 MB allows an entire file to be kept in memory. -_READ_RECORD_BUFFER = 8 * 1000 * 1000 - -# Example grouping constants. Defines length boundaries for each group. -# These values are the defaults used in Tensor2Tensor. 
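# With the defaults below, boundaries grow as max(x + 1, int(x * scale)):
# 8, 9, 10, ..., 19, 20, 22, 24, 26, ..., so short sequences get fine-grained
# buckets while long sequences share coarser ones. Each bucket is later batched
# with batch_size // bucket_max examples, keeping the padded token count of
# every batch close to batch_size.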
-_MIN_BOUNDARY = 8 -_BOUNDARY_SCALE = 1.1 - - -def _load_records(filename): - """Read file and return a dataset of tf.Examples.""" - return tf.data.TFRecordDataset(filename, buffer_size=_READ_RECORD_BUFFER) - - -def _parse_example(serialized_example): - """Return inputs and targets Tensors from a serialized tf.Example.""" - data_fields = { - "inputs": tf.VarLenFeature(tf.int64), - "targets": tf.VarLenFeature(tf.int64) - } - parsed = tf.parse_single_example(serialized_example, data_fields) - inputs = tf.sparse_tensor_to_dense(parsed["inputs"]) - targets = tf.sparse_tensor_to_dense(parsed["targets"]) - return inputs, targets - - -def _filter_max_length(example, max_length=256): - """Indicates whether the example's length is lower than the maximum length.""" - return tf.logical_and(tf.size(example[0]) <= max_length, - tf.size(example[1]) <= max_length) - - -def _get_example_length(example): - """Returns the maximum length between the example inputs and targets.""" - length = tf.maximum(tf.shape(example[0])[0], tf.shape(example[1])[0]) - return length - - -def _create_min_max_boundaries( - max_length, min_boundary=_MIN_BOUNDARY, boundary_scale=_BOUNDARY_SCALE): - """Create min and max boundary lists up to max_length. - - For example, when max_length=24, min_boundary=4 and boundary_scale=2, the - returned values will be: - buckets_min = [0, 4, 8, 16, 24] - buckets_max = [4, 8, 16, 24, 25] - - Args: - max_length: The maximum length of example in dataset. - min_boundary: Minimum length in boundary. - boundary_scale: Amount to scale consecutive boundaries in the list. - - Returns: - min and max boundary lists - - """ - # Create bucket boundaries list by scaling the previous boundary or adding 1 - # (to ensure increasing boundary sizes). - bucket_boundaries = [] - x = min_boundary - while x < max_length: - bucket_boundaries.append(x) - x = max(x + 1, int(x * boundary_scale)) - - # Create min and max boundary lists from the initial list. - buckets_min = [0] + bucket_boundaries - buckets_max = bucket_boundaries + [max_length + 1] - return buckets_min, buckets_max - - -def _batch_examples(dataset, batch_size, max_length): - """Group examples by similar lengths, and return batched dataset. - - Each batch of similar-length examples are padded to the same length, and may - have different number of elements in each batch, such that: - group_batch_size * padded_length <= batch_size. - - This decreases the number of padding tokens per batch, which improves the - training speed. - - Args: - dataset: Dataset of unbatched examples. - batch_size: Max number of tokens per batch of examples. - max_length: Max number of tokens in an example input or target sequence. - - Returns: - Dataset of batched examples with similar lengths. - """ - # Get min and max boundary lists for each example. These are used to calculate - # the `bucket_id`, which is the index at which: - # buckets_min[bucket_id] <= len(example) < buckets_max[bucket_id] - # Note that using both min and max lists improves the performance. - buckets_min, buckets_max = _create_min_max_boundaries(max_length) - - # Create list of batch sizes for each bucket_id, so that - # bucket_batch_size[bucket_id] * buckets_max[bucket_id] <= batch_size - bucket_batch_sizes = [batch_size // x for x in buckets_max] - # bucket_id will be a tensor, so convert this list to a tensor as well. 
- bucket_batch_sizes = tf.constant(bucket_batch_sizes, dtype=tf.int64) - - def example_to_bucket_id(example_input, example_target): - """Return int64 bucket id for this example, calculated based on length.""" - seq_length = _get_example_length((example_input, example_target)) - - # TODO: investigate whether removing code branching improves performance. - conditions_c = tf.logical_and( - tf.less_equal(buckets_min, seq_length), - tf.less(seq_length, buckets_max)) - bucket_id = tf.reduce_min(tf.where(conditions_c)) - return bucket_id - - def window_size_fn(bucket_id): - """Return number of examples to be grouped when given a bucket id.""" - return bucket_batch_sizes[bucket_id] - - def batching_fn(bucket_id, grouped_dataset): - """Batch and add padding to a dataset of elements with similar lengths.""" - bucket_batch_size = window_size_fn(bucket_id) - - # Batch the dataset and add padding so that all input sequences in the - # examples have the same length, and all target sequences have the same - # lengths as well. Resulting lengths of inputs and targets can differ. - return grouped_dataset.padded_batch(bucket_batch_size, ([None], [None])) - - return dataset.apply(tf.data.experimental.group_by_window( - key_func=example_to_bucket_id, - reduce_func=batching_fn, - window_size=None, - window_size_func=window_size_fn)) - - -def _read_and_batch_from_files( - file_pattern, batch_size, max_length, num_parallel_calls, shuffle, repeat, - static_batch=False): - """Create dataset where each item is a dict of "inputs" and "targets". - - Args: - file_pattern: String used to match the input TFRecord files. - batch_size: Maximum number of tokens per batch of examples - max_length: Maximum number of tokens per example - num_parallel_calls: Number of cpu cores for parallel input processing. - shuffle: If true, randomizes order of elements. - repeat: Number of times to repeat the dataset. If None, the dataset is - repeated forever. - static_batch: Whether the batches in the dataset should have static shapes. - If True, the input is batched so that every batch has the - shape [batch_size // max_length, max_length]. If False, the input is - grouped by length, and batched so that batches may have different - shapes [N, M], where: - N * M <= batch_size - M <= max_length - In general, this setting should be False. Dynamic shapes allow the inputs - to be grouped so that the number of padding tokens is minimized, and helps - model training. In cases where the input shape must be static - (e.g. running on TPU), this setting should be set to True. - - Returns: - tf.data.Dataset object containing examples loaded from the files. - """ - dataset = tf.data.Dataset.list_files(file_pattern, shuffle=shuffle) - - # Read files and interleave results. When training, the order of the examples - # will be non-deterministic. - dataset = dataset.apply( - tf.data.experimental.parallel_interleave( - _load_records, sloppy=shuffle, cycle_length=num_parallel_calls)) - - # Parse each tf.Example into a dictionary - # TODO: Look into prefetch_input_elements for performance optimization. - dataset = dataset.map(_parse_example, - num_parallel_calls=num_parallel_calls) - - # Remove examples where the input or target length exceeds the maximum length, - dataset = dataset.filter(lambda x, y: _filter_max_length((x, y), max_length)) - - if static_batch: - dataset = dataset.padded_batch( - batch_size // max_length, ([max_length], [max_length]), - drop_remainder=True) - else: - # Group and batch such that each batch has examples of similar length. 
- dataset = _batch_examples(dataset, batch_size, max_length) - - dataset = dataset.repeat(repeat) - - # Prefetch the next element to improve speed of input pipeline. - dataset = dataset.prefetch(buffer_size=tf.data.experimental.AUTOTUNE) - return dataset - - -def _generate_synthetic_data(params): - """Create synthetic data based on the parameter batch size.""" - batch = length = int(math.sqrt(params["batch_size"])) - return model_helpers.generate_synthetic_data( - input_shape=tf.TensorShape([batch, length]), - input_value=1, - input_dtype=tf.int32, - label_shape=tf.TensorShape([batch, length]), - label_value=1, - label_dtype=tf.int32, - ) - - -def train_input_fn(params): - """Load and return dataset of batched examples for use during training.""" - file_pattern = os.path.join(params["data_dir"] or "", "*train*") - if params["use_synthetic_data"]: - return _generate_synthetic_data(params) - return _read_and_batch_from_files( - file_pattern, params["batch_size"], params["max_length"], - params["num_parallel_calls"], shuffle=True, - repeat=params["repeat_dataset"], static_batch=params["static_batch"]) - - -def eval_input_fn(params): - """Load and return dataset of batched examples for use during evaluation.""" - file_pattern = os.path.join(params["data_dir"] or "", "*dev*") - if params["use_synthetic_data"]: - return _generate_synthetic_data(params) - return _read_and_batch_from_files( - file_pattern, params["batch_size"], params["max_length"], - params["num_parallel_calls"], shuffle=False, repeat=1, - static_batch=params["static_batch"]) diff --git a/official/r1/transformer/embedding_layer.py b/official/r1/transformer/embedding_layer.py deleted file mode 100644 index 3ebedeafc..000000000 --- a/official/r1/transformer/embedding_layer.py +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Implementation of embedding layer with shared weights.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import tensorflow.compat.v1 as tf # pylint: disable=g-bad-import-order - -from official.r1.utils import tpu as tpu_utils - - -class EmbeddingSharedWeights(tf.layers.Layer): - """Calculates input embeddings and pre-softmax linear with shared weights.""" - - def __init__(self, vocab_size, hidden_size, method="gather"): - """Specify characteristic parameters of embedding layer. - - Args: - vocab_size: Number of tokens in the embedding. (Typically ~32,000) - hidden_size: Dimensionality of the embedding. (Typically 512 or 1024) - method: Strategy for performing embedding lookup. "gather" uses tf.gather - which performs well on CPUs and GPUs, but very poorly on TPUs. "matmul" - one-hot encodes the indicies and formulates the embedding as a sparse - matrix multiplication. 
The matmul formulation is wasteful as it does - extra work, however matrix multiplication is very fast on TPUs which - makes "matmul" considerably faster than "gather" on TPUs. - """ - super(EmbeddingSharedWeights, self).__init__() - self.vocab_size = vocab_size - self.hidden_size = hidden_size - if method not in ("gather", "matmul"): - raise ValueError("method {} must be 'gather' or 'matmul'".format(method)) - self.method = method - - def build(self, _): - with tf.variable_scope("embedding_and_softmax", reuse=tf.AUTO_REUSE): - # Create and initialize weights. The random normal initializer was chosen - # randomly, and works well. - self.shared_weights = tf.get_variable( - "weights", [self.vocab_size, self.hidden_size], - initializer=tf.random_normal_initializer( - 0., self.hidden_size ** -0.5)) - - self.built = True - - def call(self, x): - """Get token embeddings of x. - - Args: - x: An int64 tensor with shape [batch_size, length] - Returns: - embeddings: float32 tensor with shape [batch_size, length, embedding_size] - padding: float32 tensor with shape [batch_size, length] indicating the - locations of the padding tokens in x. - """ - with tf.name_scope("embedding"): - # Create binary mask of size [batch_size, length] - mask = tf.to_float(tf.not_equal(x, 0)) - - if self.method == "gather": - embeddings = tf.gather(self.shared_weights, x) - embeddings *= tf.expand_dims(mask, -1) - else: # matmul - embeddings = tpu_utils.embedding_matmul( - embedding_table=self.shared_weights, - values=tf.cast(x, dtype=tf.int32), - mask=mask - ) - # embedding_matmul already zeros out masked positions, so - # `embeddings *= tf.expand_dims(mask, -1)` is unnecessary. - - - # Scale embedding by the sqrt of the hidden size - embeddings *= self.hidden_size ** 0.5 - - return embeddings - - - def linear(self, x): - """Computes logits by running x through a linear layer. - - Args: - x: A float32 tensor with shape [batch_size, length, hidden_size] - Returns: - float32 tensor with shape [batch_size, length, vocab_size]. - """ - with tf.name_scope("presoftmax_linear"): - batch_size = tf.shape(x)[0] - length = tf.shape(x)[1] - - x = tf.reshape(x, [-1, self.hidden_size]) - logits = tf.matmul(x, self.shared_weights, transpose_b=True) - - return tf.reshape(logits, [batch_size, length, self.vocab_size]) diff --git a/official/r1/transformer/ffn_layer.py b/official/r1/transformer/ffn_layer.py deleted file mode 100644 index fc4750323..000000000 --- a/official/r1/transformer/ffn_layer.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
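A small TF2 sketch (not the v1 code path above) of why the "gather" and "matmul" lookup methods are interchangeable, together with the padding mask and sqrt(d_model) scaling applied in call(); all sizes are toy values:

```
import tensorflow as tf

vocab_size, hidden_size = 16, 4
table = tf.random.normal([vocab_size, hidden_size])
ids = tf.constant([[3, 1, 0]])                           # token id 0 = padding

gathered = tf.gather(table, ids)                         # [1, 3, hidden_size]
one_hot = tf.one_hot(ids, vocab_size, dtype=table.dtype)
via_matmul = tf.einsum('blv,vh->blh', one_hot, table)    # same values as `gathered`

mask = tf.cast(tf.not_equal(ids, 0), table.dtype)        # zero out padding rows
embeddings = gathered * tf.expand_dims(mask, -1)
embeddings *= hidden_size ** 0.5                         # scale by sqrt(d_model)
```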
-# ============================================================================== -"""Implementation of fully connected network.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import tensorflow.compat.v1 as tf - - -class FeedFowardNetwork(tf.layers.Layer): - """Fully connected feedforward network.""" - - def __init__(self, hidden_size, filter_size, relu_dropout, train, allow_pad): - super(FeedFowardNetwork, self).__init__() - self.hidden_size = hidden_size - self.filter_size = filter_size - self.relu_dropout = relu_dropout - self.train = train - self.allow_pad = allow_pad - - self.filter_dense_layer = tf.layers.Dense( - filter_size, use_bias=True, activation=tf.nn.relu, name="filter_layer") - self.output_dense_layer = tf.layers.Dense( - hidden_size, use_bias=True, name="output_layer") - - def call(self, x, padding=None): - """Return outputs of the feedforward network. - - Args: - x: tensor with shape [batch_size, length, hidden_size] - padding: (optional) If set, the padding values are temporarily removed - from x (provided self.allow_pad is set). The padding values are placed - back in the output tensor in the same locations. - shape [batch_size, length] - - Returns: - Output of the feedforward network. - tensor with shape [batch_size, length, hidden_size] - """ - padding = None if not self.allow_pad else padding - - # Retrieve dynamically known shapes - batch_size = tf.shape(x)[0] - length = tf.shape(x)[1] - - if padding is not None: - with tf.name_scope("remove_padding"): - # Flatten padding to [batch_size*length] - pad_mask = tf.reshape(padding, [-1]) - - nonpad_ids = tf.to_int32(tf.where(pad_mask < 1e-9)) - - # Reshape x to [batch_size*length, hidden_size] to remove padding - x = tf.reshape(x, [-1, self.hidden_size]) - x = tf.gather_nd(x, indices=nonpad_ids) - - # Reshape x from 2 dimensions to 3 dimensions. - x.set_shape([None, self.hidden_size]) - x = tf.expand_dims(x, axis=0) - - output = self.filter_dense_layer(x) - if self.train: - output = tf.nn.dropout(output, 1.0 - self.relu_dropout) - output = self.output_dense_layer(output) - - if padding is not None: - with tf.name_scope("re_add_padding"): - output = tf.squeeze(output, axis=0) - output = tf.scatter_nd( - indices=nonpad_ids, - updates=output, - shape=[batch_size * length, self.hidden_size] - ) - output = tf.reshape(output, [batch_size, length, self.hidden_size]) - return output diff --git a/official/r1/transformer/schedule.py b/official/r1/transformer/schedule.py deleted file mode 100644 index 60aedffc1..000000000 --- a/official/r1/transformer/schedule.py +++ /dev/null @@ -1,130 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
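The padding-removal trick used by FeedFowardNetwork above can be shown in isolation: padded positions are dropped before the dense layers and scattered back afterwards. A minimal TF2 sketch with toy shapes:

```
import tensorflow as tf

hidden_size = 4
x = tf.random.normal([2, 3, hidden_size])        # [batch, length, hidden]
padding = tf.constant([[0., 0., 1.],
                       [0., 1., 1.]])            # 1.0 marks padding positions

pad_mask = tf.reshape(padding, [-1])             # [batch * length]
nonpad_ids = tf.cast(tf.where(pad_mask < 1e-9), tf.int32)

flat = tf.reshape(x, [-1, hidden_size])
compact = tf.gather_nd(flat, nonpad_ids)         # only the non-padded rows

# ... run the feed-forward layers on `compact` ...

restored = tf.scatter_nd(nonpad_ids, compact, shape=[2 * 3, hidden_size])
restored = tf.reshape(restored, [2, 3, hidden_size])
```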
-# ============================================================================== -"""Abstract training on a step or epoch basis.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import math - -import tensorflow.compat.v1 as tf - - -_TRAIN, _EVAL = tf.estimator.ModeKeys.TRAIN, tf.estimator.ModeKeys.EVAL - - -NUM_EXAMPLES = { - tf.estimator.ModeKeys.TRAIN: 4572160, - # # Examples that are too long are filtered out, thus the total is less - # # than the total number of lines. - # 2399123 + # news-commentary-v12.de-en - # 1920209 + # commoncrawl.de-en - # 270769, # europarl-v7.de-en - tf.estimator.ModeKeys.EVAL: 3000, # newstest2013 -} - - -class Manager(object): - """Container for convenience functions to abstract step or epoch basis. - Transformer allows users to specify an epoch basis (generally recommended for - full training) or a number of steps basis (convenient since epochs are rather - large). TPUs furthermore require a step basis; however epochs are the norm in - the machine learning community and it is desirable to allow users to specify - epochs even when running with TPUS which requires behind the scenes - conversions. - This container simply groups what are largely mundane checks and conversions - rather than interspersing them throughout the run loop code. - """ - - def __init__(self, train_steps, steps_between_evals, train_epochs, - epochs_between_evals, default_train_epochs, batch_size, - max_length, use_tpu=False, num_tpu_shards=8): - if train_steps and train_epochs: - raise ValueError("Both train_steps or train_epochs were be defined.") - - # Determine training schedule based on flags. - if train_steps: - self.train_eval_iterations = train_steps // steps_between_evals - self._single_iteration_train_steps = steps_between_evals - self._single_iteration_train_epochs = None - else: - train_epochs = train_epochs or default_train_epochs - self.train_eval_iterations = train_epochs // epochs_between_evals - self._single_iteration_train_steps = None - self._single_iteration_train_epochs = epochs_between_evals - - self.max_length = max_length - self.batch_size = batch_size - self.use_tpu = use_tpu - self.num_tpu_shards = num_tpu_shards - - if self.use_tpu: - assert (self.batch_size // self.max_length) % self.num_tpu_shards == 0 - - @property - def single_iteration_train_steps(self): - if self._single_iteration_train_steps or not self.use_tpu: - return self._single_iteration_train_steps - - return self.epochs_to_steps( - num_epochs=self._single_iteration_train_epochs, mode=_TRAIN) - - @property - def single_iteration_eval_steps(self): - if not self.use_tpu: - return None - - return self.epochs_to_steps(num_epochs=1, mode=_EVAL) - - @property - def train_increment_str(self): - if self._single_iteration_train_steps: - return "{} steps.".format(self._single_iteration_train_steps) - - if not self.use_tpu: - return "{} epochs.".format(self._single_iteration_train_epochs) - - return "~{} epochs. ({} steps)".format( - self._single_iteration_train_epochs, - self.single_iteration_train_steps) - - @property - def repeat_dataset(self): - if (self._single_iteration_train_epochs is None and - self._single_iteration_train_steps > NUM_EXAMPLES[_TRAIN]): - return math.ceil(self._single_iteration_train_steps / - NUM_EXAMPLES[_TRAIN]) - return self._single_iteration_train_epochs - - def epochs_to_steps(self, num_epochs, mode): - """Converts a number of epochs to a number of training steps. 
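To make the epoch-to-step conversion concrete, a small worked example using the newstest2013 figure above; the 2048-token batch size is the value used in the unit tests further down:

```
# steps = num_examples * max_length * num_epochs // batch_size (token batches)
num_examples, max_length, batch_size = 3000, 256, 2048
eval_steps = num_examples * max_length * 1 // batch_size   # 375
```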
- - TPU only: This function assumes that static_batch is True. - - TPU can not tolerate an OutOfRange error from a dataset. As a result the - number of examples to be processed must be known ahead of time. TPUs also - do not allow partial batches, so this function rounds down. - - Args: - num_epochs: An integer of the number of epochs to convert to steps. - mode: The estimator ModeKey of the computation - - Returns: - An integer of the number of equivalent steps rounded down. - """ - assert self.use_tpu, "epochs_to_steps should only be reached when using TPU" - total_num_tokens = NUM_EXAMPLES[mode] * self.max_length * num_epochs - return total_num_tokens // self.batch_size diff --git a/official/r1/transformer/schedule_test.py b/official/r1/transformer/schedule_test.py deleted file mode 100644 index 29b4d5fde..000000000 --- a/official/r1/transformer/schedule_test.py +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Test Transformer's schedule manager.""" - -import tensorflow.compat.v1 as tf - -from official.r1.transformer import schedule - - -class ScheduleBaseTester(tf.test.TestCase): - def test_mutual_exclusivity(self): - with self.assertRaises(ValueError): - schedule.Manager( - train_steps=100, steps_between_evals=100, train_epochs=2, - epochs_between_evals=1, default_train_epochs=None, batch_size=2048, - max_length=256) - - def test_step_basis(self): - manager = schedule.Manager( - train_steps=1000, steps_between_evals=100, train_epochs=None, - epochs_between_evals=None, default_train_epochs=None, batch_size=2048, - max_length=256) - - self.assertEqual(manager.single_iteration_train_steps, 100) - - # Evaluation uses the full set - self.assertIsNone(manager.single_iteration_eval_steps) - - self.assertIsNone(manager.repeat_dataset) - - def test_epoch_basis(self): - manager = schedule.Manager( - train_steps=None, steps_between_evals=None, train_epochs=10, - epochs_between_evals=2, default_train_epochs=None, batch_size=2048, - max_length=256) - - # For non-TPU, estimator relies on dataset exhausion - self.assertIsNone(manager.single_iteration_train_steps) - self.assertIsNone(manager.single_iteration_eval_steps) - - self.assertEqual(manager.repeat_dataset, 2) - - def test_step_basis_tpu(self): - manager = schedule.Manager( - train_steps=1000, steps_between_evals=100, train_epochs=None, - epochs_between_evals=None, default_train_epochs=None, batch_size=2048, - max_length=256, use_tpu=True) - - self.assertEqual(manager.single_iteration_train_steps, 100) - # num_eval_examples / (batch_size / max_length) == 3000 / (2048 / 256) - self.assertEqual(manager.single_iteration_eval_steps, 375) - self.assertIsNone(manager.repeat_dataset) - - def test_epoch_basis_tpu(self): - manager = schedule.Manager( - train_steps=None, steps_between_evals=None, train_epochs=10, - epochs_between_evals=2, default_train_epochs=None, batch_size=2048, 
- max_length=256, use_tpu=True) - - self.assertEqual( - manager.single_iteration_train_steps, - schedule.NUM_EXAMPLES[tf.estimator.ModeKeys.TRAIN] * 2 // (2048 / 256) - ) - - # num_eval_examples / (batch_size / max_length) == 3000 / (2048 / 256) - self.assertEqual(manager.single_iteration_eval_steps, 375) - - self.assertEqual(manager.repeat_dataset, 2) - - -if __name__ == "__main__": - tf.test.main() diff --git a/official/r1/transformer/transformer.py b/official/r1/transformer/transformer.py deleted file mode 100644 index 708c3dd92..000000000 --- a/official/r1/transformer/transformer.py +++ /dev/null @@ -1,417 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Defines the Transformer model, and its encoder and decoder stacks. - -Model paper: https://arxiv.org/pdf/1706.03762.pdf -Transformer model code source: https://github.com/tensorflow/tensor2tensor -""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import tensorflow.compat.v1 as tf - -from official.nlp.transformer import beam_search_v1 as beam_search -from official.nlp.transformer import model_utils -from official.nlp.transformer.utils.tokenizer import EOS_ID -from official.r1.transformer import attention_layer -from official.r1.transformer import embedding_layer -from official.r1.transformer import ffn_layer - -_NEG_INF = -1e9 - - -class Transformer(object): - """Transformer model for sequence to sequence data. - - Implemented as described in: https://arxiv.org/pdf/1706.03762.pdf - - The Transformer model consists of an encoder and decoder. The input is an int - sequence (or a batch of sequences). The encoder produces a continous - representation, and the decoder uses the encoder output to generate - probabilities for the output sequence. - """ - - def __init__(self, params, train): - """Initialize layers to build Transformer model. - - Args: - params: hyperparameter object defining layer sizes, dropout values, etc. - train: boolean indicating whether the model is in training mode. Used to - determine if dropout layers should be added. - """ - self.train = train - self.params = params - - self.embedding_softmax_layer = embedding_layer.EmbeddingSharedWeights( - params["vocab_size"], params["hidden_size"], - method="matmul" if params["tpu"] else "gather") - self.encoder_stack = EncoderStack(params, train) - self.decoder_stack = DecoderStack(params, train) - - def __call__(self, inputs, targets=None): - """Calculate target logits or inferred target sequences. - - Args: - inputs: int tensor with shape [batch_size, input_length]. - targets: None or int tensor with shape [batch_size, target_length]. - - Returns: - If targets is defined, then return logits for each word in the target - sequence. 
float tensor with shape [batch_size, target_length, vocab_size] - If target is none, then generate output sequence one token at a time. - returns a dictionary { - output: [batch_size, decoded length] - score: [batch_size, float]} - """ - # Variance scaling is used here because it seems to work in many problems. - # Other reasonable initializers may also work just as well. - initializer = tf.variance_scaling_initializer( - self.params["initializer_gain"], mode="fan_avg", distribution="uniform") - with tf.variable_scope("Transformer", initializer=initializer): - # Calculate attention bias for encoder self-attention and decoder - # multi-headed attention layers. - attention_bias = model_utils.get_padding_bias(inputs) - - # Run the inputs through the encoder layer to map the symbol - # representations to continuous representations. - encoder_outputs = self.encode(inputs, attention_bias) - - # Generate output sequence if targets is None, or return logits if target - # sequence is known. - if targets is None: - return self.predict(encoder_outputs, attention_bias) - else: - logits = self.decode(targets, encoder_outputs, attention_bias) - return logits - - def encode(self, inputs, attention_bias): - """Generate continuous representation for inputs. - - Args: - inputs: int tensor with shape [batch_size, input_length]. - attention_bias: float tensor with shape [batch_size, 1, 1, input_length] - - Returns: - float tensor with shape [batch_size, input_length, hidden_size] - """ - with tf.name_scope("encode"): - # Prepare inputs to the layer stack by adding positional encodings and - # applying dropout. - embedded_inputs = self.embedding_softmax_layer(inputs) - inputs_padding = model_utils.get_padding(inputs) - - with tf.name_scope("add_pos_encoding"): - length = tf.shape(embedded_inputs)[1] - pos_encoding = model_utils.get_position_encoding( - length, self.params["hidden_size"]) - encoder_inputs = embedded_inputs + pos_encoding - - if self.train: - encoder_inputs = tf.nn.dropout( - encoder_inputs, 1 - self.params["layer_postprocess_dropout"]) - - return self.encoder_stack(encoder_inputs, attention_bias, inputs_padding) - - def decode(self, targets, encoder_outputs, attention_bias): - """Generate logits for each value in the target sequence. - - Args: - targets: target values for the output sequence. - int tensor with shape [batch_size, target_length] - encoder_outputs: continuous representation of input sequence. - float tensor with shape [batch_size, input_length, hidden_size] - attention_bias: float tensor with shape [batch_size, 1, 1, input_length] - - Returns: - float32 tensor with shape [batch_size, target_length, vocab_size] - """ - with tf.name_scope("decode"): - # Prepare inputs to decoder layers by shifting targets, adding positional - # encoding and applying dropout. 
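The attention bias passed around above has shape [batch_size, 1, 1, input_length]. The exact construction lives in model_utils.get_padding_bias (not part of this file); the sketch below shows the usual form, assuming padding id 0, and is only illustrative:

```
import tensorflow as tf

_NEG_INF = -1e9
inputs = tf.constant([[7, 5, 0, 0]])                   # token id 0 = padding

padding = tf.cast(tf.equal(inputs, 0), tf.float32)     # [batch, length]
attention_bias = tf.expand_dims(tf.expand_dims(padding * _NEG_INF, 1), 1)
# Shape [batch, 1, 1, length]; adding this to the attention logits drives the
# softmax weight of padded positions toward zero.
```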
- decoder_inputs = self.embedding_softmax_layer(targets) - with tf.name_scope("shift_targets"): - # Shift targets to the right, and remove the last element - decoder_inputs = tf.pad( - decoder_inputs, [[0, 0], [1, 0], [0, 0]])[:, :-1, :] - with tf.name_scope("add_pos_encoding"): - length = tf.shape(decoder_inputs)[1] - decoder_inputs += model_utils.get_position_encoding( - length, self.params["hidden_size"]) - if self.train: - decoder_inputs = tf.nn.dropout( - decoder_inputs, 1 - self.params["layer_postprocess_dropout"]) - - # Run values - decoder_self_attention_bias = model_utils.get_decoder_self_attention_bias( - length) - outputs = self.decoder_stack( - decoder_inputs, encoder_outputs, decoder_self_attention_bias, - attention_bias) - logits = self.embedding_softmax_layer.linear(outputs) - return logits - - def _get_symbols_to_logits_fn(self, max_decode_length): - """Returns a decoding function that calculates logits of the next tokens.""" - - timing_signal = model_utils.get_position_encoding( - max_decode_length + 1, self.params["hidden_size"]) - decoder_self_attention_bias = model_utils.get_decoder_self_attention_bias( - max_decode_length) - - def symbols_to_logits_fn(ids, i, cache): - """Generate logits for next potential IDs. - - Args: - ids: Current decoded sequences. - int tensor with shape [batch_size * beam_size, i + 1] - i: Loop index - cache: dictionary of values storing the encoder output, encoder-decoder - attention bias, and previous decoder attention values. - - Returns: - Tuple of - (logits with shape [batch_size * beam_size, vocab_size], - updated cache values) - """ - # Set decoder input to the last generated IDs - decoder_input = ids[:, -1:] - - # Preprocess decoder input by getting embeddings and adding timing signal. - decoder_input = self.embedding_softmax_layer(decoder_input) - decoder_input += timing_signal[i:i + 1] - - self_attention_bias = decoder_self_attention_bias[:, :, i:i + 1, :i + 1] - decoder_outputs = self.decoder_stack( - decoder_input, cache.get("encoder_outputs"), self_attention_bias, - cache.get("encoder_decoder_attention_bias"), cache) - logits = self.embedding_softmax_layer.linear(decoder_outputs) - logits = tf.squeeze(logits, axis=[1]) - return logits, cache - return symbols_to_logits_fn - - def predict(self, encoder_outputs, encoder_decoder_attention_bias): - """Return predicted sequence.""" - batch_size = tf.shape(encoder_outputs)[0] - input_length = tf.shape(encoder_outputs)[1] - max_decode_length = input_length + self.params["extra_decode_length"] - - symbols_to_logits_fn = self._get_symbols_to_logits_fn(max_decode_length) - - # Create initial set of IDs that will be passed into symbols_to_logits_fn. - initial_ids = tf.zeros([batch_size], dtype=tf.int32) - - # Create cache storing decoder attention values for each layer. - cache = { - "layer_%d" % layer: { - "k": tf.zeros([batch_size, 0, self.params["hidden_size"]]), - "v": tf.zeros([batch_size, 0, self.params["hidden_size"]]), - } for layer in range(self.params["num_hidden_layers"])} - - # Add encoder output and attention bias to the cache. - cache["encoder_outputs"] = encoder_outputs - cache["encoder_decoder_attention_bias"] = encoder_decoder_attention_bias - - # Use beam search to find the top beam_size sequences and scores. 
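The "shift targets" step above is the standard teacher-forcing right shift: pad one all-zero position at the front and drop the last position, so step i of the decoder predicts target token i. A tiny runnable sketch with toy embedded targets:

```
import tensorflow as tf

decoder_inputs = tf.reshape(tf.range(6, dtype=tf.float32), [1, 3, 2])
shifted = tf.pad(decoder_inputs, [[0, 0], [1, 0], [0, 0]])[:, :-1, :]
# Position 0 is now all zeros (an implicit start token); the final target
# step is dropped, so the sequence length is unchanged.
```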
- decoded_ids, scores = beam_search.sequence_beam_search( - symbols_to_logits_fn=symbols_to_logits_fn, - initial_ids=initial_ids, - initial_cache=cache, - vocab_size=self.params["vocab_size"], - beam_size=self.params["beam_size"], - alpha=self.params["alpha"], - max_decode_length=max_decode_length, - eos_id=EOS_ID) - - # Get the top sequence for each batch element - top_decoded_ids = decoded_ids[:, 0, 1:] - top_scores = scores[:, 0] - - return {"outputs": top_decoded_ids, "scores": top_scores} - - -class LayerNormalization(tf.layers.Layer): - """Applies layer normalization.""" - - def __init__(self, hidden_size): - super(LayerNormalization, self).__init__() - self.hidden_size = hidden_size - - def build(self, _): - self.scale = tf.get_variable("layer_norm_scale", [self.hidden_size], - initializer=tf.ones_initializer()) - self.bias = tf.get_variable("layer_norm_bias", [self.hidden_size], - initializer=tf.zeros_initializer()) - self.built = True - - def call(self, x, epsilon=1e-6): - mean = tf.reduce_mean(x, axis=[-1], keepdims=True) - variance = tf.reduce_mean(tf.square(x - mean), axis=[-1], keepdims=True) - norm_x = (x - mean) * tf.rsqrt(variance + epsilon) - return norm_x * self.scale + self.bias - - -class PrePostProcessingWrapper(object): - """Wrapper class that applies layer pre-processing and post-processing.""" - - def __init__(self, layer, params, train): - self.layer = layer - self.postprocess_dropout = params["layer_postprocess_dropout"] - self.train = train - - # Create normalization layer - self.layer_norm = LayerNormalization(params["hidden_size"]) - - def __call__(self, x, *args, **kwargs): - # Preprocessing: apply layer normalization - y = self.layer_norm(x) - - # Get layer output - y = self.layer(y, *args, **kwargs) - - # Postprocessing: apply dropout and residual connection - if self.train: - y = tf.nn.dropout(y, 1 - self.postprocess_dropout) - return x + y - - -class EncoderStack(tf.layers.Layer): - """Transformer encoder stack. - - The encoder stack is made up of N identical layers. Each layer is composed - of the sublayers: - 1. Self-attention layer - 2. Feedforward network (which is 2 fully-connected layers) - """ - - def __init__(self, params, train): - super(EncoderStack, self).__init__() - self.layers = [] - for _ in range(params["num_hidden_layers"]): - # Create sublayers for each layer. - self_attention_layer = attention_layer.SelfAttention( - params["hidden_size"], params["num_heads"], - params["attention_dropout"], train) - feed_forward_network = ffn_layer.FeedFowardNetwork( - params["hidden_size"], params["filter_size"], - params["relu_dropout"], train, params["allow_ffn_pad"]) - - self.layers.append([ - PrePostProcessingWrapper(self_attention_layer, params, train), - PrePostProcessingWrapper(feed_forward_network, params, train)]) - - # Create final layer normalization layer. - self.output_normalization = LayerNormalization(params["hidden_size"]) - - def call(self, encoder_inputs, attention_bias, inputs_padding): - """Return the output of the encoder layer stacks. - - Args: - encoder_inputs: tensor with shape [batch_size, input_length, hidden_size] - attention_bias: bias for the encoder self-attention layer. - [batch_size, 1, 1, input_length] - inputs_padding: P - - Returns: - Output of encoder layer stack. - float32 tensor with shape [batch_size, input_length, hidden_size] - """ - for n, layer in enumerate(self.layers): - # Run inputs through the sublayers. 
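PrePostProcessingWrapper above implements the pre-norm residual pattern: x + Dropout(Sublayer(LayerNorm(x))). A TF2/Keras sketch of the same pattern, with Keras layers standing in for the custom v1 layers:

```
import tensorflow as tf

def pre_norm_residual(x, sublayer, layer_norm, dropout_rate, training):
  """x + Dropout(Sublayer(LayerNorm(x))), as in PrePostProcessingWrapper."""
  y = layer_norm(x)
  y = sublayer(y)
  if training:
    y = tf.nn.dropout(y, rate=dropout_rate)
  return x + y

layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-6)
ffn = tf.keras.layers.Dense(8)
x = tf.random.normal([2, 5, 8])
out = pre_norm_residual(x, ffn, layer_norm, dropout_rate=0.1, training=True)
```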
- self_attention_layer = layer[0] - feed_forward_network = layer[1] - - with tf.variable_scope("layer_%d" % n): - with tf.variable_scope("self_attention"): - encoder_inputs = self_attention_layer(encoder_inputs, attention_bias) - with tf.variable_scope("ffn"): - encoder_inputs = feed_forward_network(encoder_inputs, inputs_padding) - - return self.output_normalization(encoder_inputs) - - -class DecoderStack(tf.layers.Layer): - """Transformer decoder stack. - - Like the encoder stack, the decoder stack is made up of N identical layers. - Each layer is composed of the sublayers: - 1. Self-attention layer - 2. Multi-headed attention layer combining encoder outputs with results from - the previous self-attention layer. - 3. Feedforward network (2 fully-connected layers) - """ - - def __init__(self, params, train): - super(DecoderStack, self).__init__() - self.layers = [] - for _ in range(params["num_hidden_layers"]): - self_attention_layer = attention_layer.SelfAttention( - params["hidden_size"], params["num_heads"], - params["attention_dropout"], train) - enc_dec_attention_layer = attention_layer.Attention( - params["hidden_size"], params["num_heads"], - params["attention_dropout"], train) - feed_forward_network = ffn_layer.FeedFowardNetwork( - params["hidden_size"], params["filter_size"], - params["relu_dropout"], train, params["allow_ffn_pad"]) - - self.layers.append([ - PrePostProcessingWrapper(self_attention_layer, params, train), - PrePostProcessingWrapper(enc_dec_attention_layer, params, train), - PrePostProcessingWrapper(feed_forward_network, params, train)]) - - self.output_normalization = LayerNormalization(params["hidden_size"]) - - def call(self, decoder_inputs, encoder_outputs, decoder_self_attention_bias, - attention_bias, cache=None): - """Return the output of the decoder layer stacks. - - Args: - decoder_inputs: tensor with shape [batch_size, target_length, hidden_size] - encoder_outputs: tensor with shape [batch_size, input_length, hidden_size] - decoder_self_attention_bias: bias for decoder self-attention layer. - [1, 1, target_len, target_length] - attention_bias: bias for encoder-decoder attention layer. - [batch_size, 1, 1, input_length] - cache: (Used for fast decoding) A nested dictionary storing previous - decoder self-attention values. The items are: - {layer_n: {"k": tensor with shape [batch_size, i, key_channels], - "v": tensor with shape [batch_size, i, value_channels]}, - ...} - - Returns: - Output of decoder layer stack. - float32 tensor with shape [batch_size, target_length, hidden_size] - """ - for n, layer in enumerate(self.layers): - self_attention_layer = layer[0] - enc_dec_attention_layer = layer[1] - feed_forward_network = layer[2] - - # Run inputs through the sublayers. 
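The per-layer cache used for fast decoding starts with zero-length key/value tensors and is grown by one position per decoded step; the concatenation itself happens inside the attention layers, which are not part of this file, so the sketch below is only schematic:

```
import tensorflow as tf

batch, hidden_size = 2, 4
cache_k = tf.zeros([batch, 0, hidden_size])
for step in range(3):
  k_step = tf.random.normal([batch, 1, hidden_size])   # keys for this step
  cache_k = tf.concat([cache_k, k_step], axis=1)
# cache_k.shape == (2, 3, 4) after three decoded positions.
```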
- layer_name = "layer_%d" % n - layer_cache = cache[layer_name] if cache is not None else None - with tf.variable_scope(layer_name): - with tf.variable_scope("self_attention"): - decoder_inputs = self_attention_layer( - decoder_inputs, decoder_self_attention_bias, cache=layer_cache) - with tf.variable_scope("encdec_attention"): - decoder_inputs = enc_dec_attention_layer( - decoder_inputs, encoder_outputs, attention_bias) - with tf.variable_scope("ffn"): - decoder_inputs = feed_forward_network(decoder_inputs) - - return self.output_normalization(decoder_inputs) diff --git a/official/r1/transformer/transformer_main.py b/official/r1/transformer/transformer_main.py deleted file mode 100644 index eb2c2f948..000000000 --- a/official/r1/transformer/transformer_main.py +++ /dev/null @@ -1,710 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Train and evaluate the Transformer model. - -See README for description of setting the training schedule and evaluating the -BLEU score. -""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import os -import tempfile - -# pylint: disable=g-bad-import-order -from six.moves import xrange # pylint: disable=redefined-builtin -from absl import app as absl_app -from absl import flags -import tensorflow.compat.v1 as tf -# pylint: enable=g-bad-import-order - -from official.nlp.transformer import model_params -from official.r1.utils import export -from official.r1.utils import tpu as tpu_util -from official.r1.transformer import translate -from official.r1.transformer import transformer -from official.r1.transformer import dataset -from official.r1.transformer import schedule -from official.nlp.transformer import compute_bleu -from official.nlp.transformer.utils import metrics -from official.nlp.transformer.utils import tokenizer -from official.utils.flags import core as flags_core -from official.r1.utils.logs import hooks_helper -from official.r1.utils.logs import logger -from official.utils.misc import distribution_utils -from official.utils.misc import model_helpers - -PARAMS_MAP = { - "tiny": model_params.TINY_PARAMS, - "base": model_params.BASE_PARAMS, - "big": model_params.BIG_PARAMS, -} - - -DEFAULT_TRAIN_EPOCHS = 10 -INF = 1000000000 # 1e9 -BLEU_DIR = "bleu" - -# Dictionary containing tensors that are logged by the logging hooks. Each item -# maps a string to the tensor name. -TENSORS_TO_LOG = { - "learning_rate": "model/get_train_op/learning_rate/learning_rate", - "cross_entropy_loss": "model/cross_entropy"} - - -def model_fn(features, labels, mode, params): - """Defines how to train, evaluate and predict from the transformer model.""" - with tf.variable_scope("model"): - inputs, targets = features, labels - - # Create model and get output logits. 
- model = transformer.Transformer(params, mode == tf.estimator.ModeKeys.TRAIN) - - logits = model(inputs, targets) - - # When in prediction mode, the labels/targets is None. The model output - # is the prediction - if mode == tf.estimator.ModeKeys.PREDICT: - if params["use_tpu"]: - raise NotImplementedError("Prediction is not yet supported on TPUs.") - return tf.estimator.EstimatorSpec( - tf.estimator.ModeKeys.PREDICT, - predictions=logits, - export_outputs={ - "translate": tf.estimator.export.PredictOutput(logits) - }) - - # Explicitly set the shape of the logits for XLA (TPU). This is needed - # because the logits are passed back to the host VM CPU for metric - # evaluation, and the shape of [?, ?, vocab_size] is too vague. However - # it is known from Transformer that the first two dimensions of logits - # are the dimensions of targets. Note that the ambiguous shape of logits is - # not a problem when computing xentropy, because padded_cross_entropy_loss - # resolves the shape on the TPU. - logits.set_shape(targets.shape.as_list() + logits.shape.as_list()[2:]) - - # Calculate model loss. - # xentropy contains the cross entropy loss of every nonpadding token in the - # targets. - xentropy, weights = metrics.padded_cross_entropy_loss( - logits, targets, params["label_smoothing"], params["vocab_size"]) - loss = tf.reduce_sum(xentropy) / tf.reduce_sum(weights) - - # Save loss as named tensor that will be logged with the logging hook. - tf.identity(loss, "cross_entropy") - - if mode == tf.estimator.ModeKeys.EVAL: - if params["use_tpu"]: - # host call functions should only have tensors as arguments. - # This lambda pre-populates params so that metric_fn is - # TPUEstimator compliant. - metric_fn = lambda logits, labels: ( - metrics.get_eval_metrics(logits, labels, params=params)) - eval_metrics = (metric_fn, [logits, labels]) - return tf.estimator.tpu.TPUEstimatorSpec( - mode=mode, - loss=loss, - predictions={"predictions": logits}, - eval_metrics=eval_metrics) - return tf.estimator.EstimatorSpec( - mode=mode, loss=loss, predictions={"predictions": logits}, - eval_metric_ops=metrics.get_eval_metrics(logits, labels, params)) - else: - train_op, metric_dict = get_train_op_and_metrics(loss, params) - - # Epochs can be quite long. This gives some intermediate information - # in TensorBoard. - metric_dict["minibatch_loss"] = loss - if params["use_tpu"]: - return tf.estimator.tpu.TPUEstimatorSpec( - mode=mode, - loss=loss, - train_op=train_op, - host_call=tpu_util.construct_scalar_host_call( - metric_dict=metric_dict, - model_dir=params["model_dir"], - prefix="training/")) - record_scalars(metric_dict) - return tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op) - - -def record_scalars(metric_dict): - for key, value in metric_dict.items(): - tf.summary.scalar(name=key, tensor=value) - - -def get_learning_rate(learning_rate, hidden_size, learning_rate_warmup_steps): - """Calculate learning rate with linear warmup and rsqrt decay.""" - with tf.name_scope("learning_rate"): - warmup_steps = tf.to_float(learning_rate_warmup_steps) - step = tf.to_float(tf.train.get_or_create_global_step()) - - learning_rate *= (hidden_size ** -0.5) - # Apply linear warmup - learning_rate *= tf.minimum(1.0, step / warmup_steps) - # Apply rsqrt decay - learning_rate *= tf.rsqrt(tf.maximum(step, warmup_steps)) - - # Create a named tensor that will be logged using the logging hook. - # The full name includes variable and names scope. 
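The schedule computed above is linear warmup followed by reciprocal-square-root decay, scaled by hidden_size ** -0.5. A pure-Python sketch of the same formula; the base rate, hidden size, and warmup steps below are placeholders, not necessarily the configured defaults:

```
import math

def transformer_lr(step, base_lr=2.0, hidden_size=512, warmup_steps=16000):
  lr = base_lr * hidden_size ** -0.5
  lr *= min(1.0, step / warmup_steps)            # linear warmup
  lr *= 1.0 / math.sqrt(max(step, warmup_steps)) # rsqrt decay after warmup
  return lr

print(transformer_lr(1000), transformer_lr(16000), transformer_lr(64000))
```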
In this case, the name - # is model/get_train_op/learning_rate/learning_rate - tf.identity(learning_rate, "learning_rate") - - return learning_rate - - -def get_train_op_and_metrics(loss, params): - """Generate training op and metrics to save in TensorBoard.""" - with tf.variable_scope("get_train_op"): - learning_rate = get_learning_rate( - learning_rate=params["learning_rate"], - hidden_size=params["hidden_size"], - learning_rate_warmup_steps=params["learning_rate_warmup_steps"]) - - # Create optimizer. Use LazyAdamOptimizer from TF contrib, which is faster - # than the TF core Adam optimizer. - from tensorflow.contrib import opt as contrib_opt # pylint: disable=g-import-not-at-top - optimizer = contrib_opt.LazyAdamOptimizer( - learning_rate, - beta1=params["optimizer_adam_beta1"], - beta2=params["optimizer_adam_beta2"], - epsilon=params["optimizer_adam_epsilon"]) - - if params["use_tpu"] and params["tpu"] != tpu_util.LOCAL: - optimizer = tf.compat.v1.tpu.CrossShardOptimizer(optimizer) - - # Uses automatic mixed precision FP16 training if on GPU. - if params["dtype"] == "fp16": - optimizer = tf.train.experimental.enable_mixed_precision_graph_rewrite( - optimizer) - - # Calculate and apply gradients using LazyAdamOptimizer. - global_step = tf.train.get_global_step() - tvars = tf.trainable_variables() - gradients = optimizer.compute_gradients( - loss, tvars, colocate_gradients_with_ops=True) - minimize_op = optimizer.apply_gradients( - gradients, global_step=global_step, name="train") - update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS) - train_op = tf.group(minimize_op, update_ops) - - train_metrics = {"learning_rate": learning_rate} - - if not params["use_tpu"]: - # gradient norm is not included as a summary when running on TPU, as - # it can cause instability between the TPU and the host controller. - gradient_norm = tf.global_norm(list(zip(*gradients))[0]) - train_metrics["global_norm/gradient_norm"] = gradient_norm - - return train_op, train_metrics - - -def translate_and_compute_bleu(estimator, subtokenizer, bleu_source, bleu_ref): - """Translate file and report the cased and uncased bleu scores.""" - # Create temporary file to store translation. - tmp = tempfile.NamedTemporaryFile(delete=False) - tmp_filename = tmp.name - - translate.translate_file( - estimator, subtokenizer, bleu_source, output_file=tmp_filename, - print_all_translations=False) - - # Compute uncased and cased bleu scores. - uncased_score = compute_bleu.bleu_wrapper(bleu_ref, tmp_filename, False) - cased_score = compute_bleu.bleu_wrapper(bleu_ref, tmp_filename, True) - os.remove(tmp_filename) - return uncased_score, cased_score - - -def get_global_step(estimator): - """Return estimator's last checkpoint.""" - return int(estimator.latest_checkpoint().split("-")[-1]) - - -def evaluate_and_log_bleu(estimator, bleu_source, bleu_ref, vocab_file): - """Calculate and record the BLEU score.""" - subtokenizer = tokenizer.Subtokenizer(vocab_file) - - uncased_score, cased_score = translate_and_compute_bleu( - estimator, subtokenizer, bleu_source, bleu_ref) - - tf.logging.info("Bleu score (uncased): %f", uncased_score) - tf.logging.info("Bleu score (cased): %f", cased_score) - return uncased_score, cased_score - - -def _validate_file(filepath): - """Make sure that file exists.""" - if not tf.io.gfile.exists(filepath): - raise tf.errors.NotFoundError(None, None, "File %s not found." 
% filepath) - - -def run_loop( - estimator, schedule_manager, train_hooks=None, benchmark_logger=None, - bleu_source=None, bleu_ref=None, bleu_threshold=None, vocab_file=None): - """Train and evaluate model, and optionally compute model's BLEU score. - - **Step vs. Epoch vs. Iteration** - - Steps and epochs are canonical terms used in TensorFlow and general machine - learning. They are used to describe running a single process (train/eval): - - Step refers to running the process through a single or batch of examples. - - Epoch refers to running the process through an entire dataset. - - E.g. training a dataset with 100 examples. The dataset is - divided into 20 batches with 5 examples per batch. A single training step - trains the model on one batch. After 20 training steps, the model will have - trained on every batch in the dataset, or, in other words, one epoch. - - Meanwhile, iteration is used in this implementation to describe running - multiple processes (training and eval). - - A single iteration: - 1. trains the model for a specific number of steps or epochs. - 2. evaluates the model. - 3. (if source and ref files are provided) compute BLEU score. - - This function runs through multiple train+eval+bleu iterations. - - Args: - estimator: tf.Estimator containing model to train. - schedule_manager: A schedule.Manager object to guide the run loop. - train_hooks: List of hooks to pass to the estimator during training. - benchmark_logger: a BenchmarkLogger object that logs evaluation data - bleu_source: File containing text to be translated for BLEU calculation. - bleu_ref: File containing reference translations for BLEU calculation. - bleu_threshold: minimum BLEU score before training is stopped. - vocab_file: Path to vocab file that will be used to subtokenize bleu_source. - - Returns: - Dict of results of the run. Contains the keys `eval_results`, - `train_hooks`, `bleu_cased`, and `bleu_uncased`. `train_hooks` is a list the - instances of hooks used during training. - - Raises: - ValueError: if both or none of single_iteration_train_steps and - single_iteration_train_epochs were defined. - NotFoundError: if the vocab file or bleu files don't exist. - """ - if bleu_source: - _validate_file(bleu_source) - if bleu_ref: - _validate_file(bleu_ref) - if vocab_file: - _validate_file(vocab_file) - - evaluate_bleu = bleu_source is not None and bleu_ref is not None - if evaluate_bleu and schedule_manager.use_tpu: - raise ValueError("BLEU score can not be computed when training with a TPU, " - "as it requires estimator.predict which is not yet " - "supported.") - - # Print details of training schedule. - tf.logging.info("Training schedule:") - tf.logging.info( - "\t1. Train for {}".format(schedule_manager.train_increment_str)) - tf.logging.info("\t2. Evaluate model.") - if evaluate_bleu: - tf.logging.info("\t3. Compute BLEU score.") - if bleu_threshold is not None: - tf.logging.info("Repeat above steps until the BLEU score reaches %f" % - bleu_threshold) - if not evaluate_bleu or bleu_threshold is None: - tf.logging.info("Repeat above steps %d times." % - schedule_manager.train_eval_iterations) - - if evaluate_bleu: - # Create summary writer to log bleu score (values can be displayed in - # Tensorboard). - bleu_writer = tf.summary.FileWriter( - os.path.join(estimator.model_dir, BLEU_DIR)) - if bleu_threshold is not None: - # Change loop stopping condition if bleu_threshold is defined. 
- schedule_manager.train_eval_iterations = INF - - # Loop training/evaluation/bleu cycles - stats = {} - for i in xrange(schedule_manager.train_eval_iterations): - tf.logging.info("Starting iteration %d" % (i + 1)) - - # Train the model for single_iteration_train_steps or until the input fn - # runs out of examples (if single_iteration_train_steps is None). - estimator.train( - dataset.train_input_fn, - steps=schedule_manager.single_iteration_train_steps, - hooks=train_hooks) - - eval_results = None - eval_results = estimator.evaluate( - input_fn=dataset.eval_input_fn, - steps=schedule_manager.single_iteration_eval_steps) - - tf.logging.info("Evaluation results (iter %d/%d):" % - (i + 1, schedule_manager.train_eval_iterations)) - tf.logging.info(eval_results) - benchmark_logger.log_evaluation_result(eval_results) - - # The results from estimator.evaluate() are measured on an approximate - # translation, which utilize the target golden values provided. The actual - # bleu score must be computed using the estimator.predict() path, which - # outputs translations that are not based on golden values. The translations - # are compared to reference file to get the actual bleu score. - if evaluate_bleu: - uncased_score, cased_score = evaluate_and_log_bleu( - estimator, bleu_source, bleu_ref, vocab_file) - - stats["bleu_uncased"] = uncased_score - stats["bleu_cased"] = cased_score - - # Write actual bleu scores using summary writer and benchmark logger - global_step = get_global_step(estimator) - summary = tf.Summary(value=[ - tf.Summary.Value(tag="bleu/uncased", simple_value=uncased_score), - tf.Summary.Value(tag="bleu/cased", simple_value=cased_score), - ]) - bleu_writer.add_summary(summary, global_step) - bleu_writer.flush() - benchmark_logger.log_metric( - "bleu_uncased", uncased_score, global_step=global_step) - benchmark_logger.log_metric( - "bleu_cased", cased_score, global_step=global_step) - - # Stop training if bleu stopping threshold is met. - if model_helpers.past_stop_threshold(bleu_threshold, uncased_score): - bleu_writer.close() - break - - stats["eval_results"] = eval_results - stats["train_hooks"] = train_hooks - - return stats - - -def define_transformer_flags(): - """Add flags and flag validators for running transformer_main.""" - # Add common flags (data_dir, model_dir, train_epochs, etc.). - flags.DEFINE_integer( - name="max_length", short_name="ml", default=None, - help=flags_core.help_wrap("Max length.")) - - flags_core.define_base(clean=True, train_epochs=True, - epochs_between_evals=True, stop_threshold=True, - num_gpu=True, hooks=True, export_dir=True, - distribution_strategy=True) - flags_core.define_performance( - num_parallel_calls=True, - inter_op=False, - intra_op=False, - synthetic_data=True, - max_train_steps=False, - dtype=True, - all_reduce_alg=True - ) - flags_core.define_benchmark() - flags_core.define_device(tpu=True) - - # Set flags from the flags_core module as "key flags" so they're listed when - # the '-h' flag is used. Without this line, the flags defined above are - # only shown in the full `--helpful` help text. - flags.adopt_module_key_flags(flags_core) - - # Add transformer-specific flags - flags.DEFINE_enum( - name="param_set", short_name="mp", default="big", - enum_values=PARAMS_MAP.keys(), - help=flags_core.help_wrap( - "Parameter set to use when creating and training the model. 
The " - "parameters define the input shape (batch size and max length), " - "model configuration (size of embedding, # of hidden layers, etc.), " - "and various other settings. The big parameter set increases the " - "default batch size, embedding/hidden size, and filter size. For a " - "complete list of parameters, please see model/model_params.py.")) - - flags.DEFINE_bool( - name="static_batch", default=False, - help=flags_core.help_wrap( - "Whether the batches in the dataset should have static shapes. In " - "general, this setting should be False. Dynamic shapes allow the " - "inputs to be grouped so that the number of padding tokens is " - "minimized, and helps model training. In cases where the input shape " - "must be static (e.g. running on TPU), this setting will be ignored " - "and static batching will always be used.")) - - # Flags for training with steps (may be used for debugging) - flags.DEFINE_integer( - name="train_steps", short_name="ts", default=None, - help=flags_core.help_wrap("The number of steps used to train.")) - flags.DEFINE_integer( - name="steps_between_evals", short_name="sbe", default=1000, - help=flags_core.help_wrap( - "The Number of training steps to run between evaluations. This is " - "used if --train_steps is defined.")) - - # BLEU score computation - flags.DEFINE_string( - name="bleu_source", short_name="bls", default=None, - help=flags_core.help_wrap( - "Path to source file containing text translate when calculating the " - "official BLEU score. Both --bleu_source and --bleu_ref must be set. " - "Use the flag --stop_threshold to stop the script based on the " - "uncased BLEU score.")) - flags.DEFINE_string( - name="bleu_ref", short_name="blr", default=None, - help=flags_core.help_wrap( - "Path to source file containing text translate when calculating the " - "official BLEU score. Both --bleu_source and --bleu_ref must be set. " - "Use the flag --stop_threshold to stop the script based on the " - "uncased BLEU score.")) - flags.DEFINE_string( - name="vocab_file", short_name="vf", default=None, - help=flags_core.help_wrap( - "Path to subtoken vocabulary file. If data_download.py was used to " - "download and encode the training data, look in the data_dir to find " - "the vocab file.")) - - flags_core.set_defaults(data_dir="/tmp/translate_ende", - model_dir="/tmp/transformer_model", - batch_size=None, - train_epochs=None) - - @flags.multi_flags_validator( - ["train_epochs", "train_steps"], - message="Both --train_steps and --train_epochs were set. 
Only one may be " - "defined.") - def _check_train_limits(flag_dict): - return flag_dict["train_epochs"] is None or flag_dict["train_steps"] is None - - @flags.multi_flags_validator( - ["bleu_source", "bleu_ref"], - message="Both or neither --bleu_source and --bleu_ref must be defined.") - def _check_bleu_files(flags_dict): - return (flags_dict["bleu_source"] is None) == ( - flags_dict["bleu_ref"] is None) - - @flags.multi_flags_validator( - ["bleu_source", "bleu_ref", "vocab_file"], - message="--vocab_file must be defined if --bleu_source and --bleu_ref " - "are defined.") - def _check_bleu_vocab_file(flags_dict): - if flags_dict["bleu_source"] and flags_dict["bleu_ref"]: - return flags_dict["vocab_file"] is not None - return True - - @flags.multi_flags_validator( - ["export_dir", "vocab_file"], - message="--vocab_file must be defined if --export_dir is set.") - def _check_export_vocab_file(flags_dict): - if flags_dict["export_dir"]: - return flags_dict["vocab_file"] is not None - return True - - flags_core.require_cloud_storage(["data_dir", "model_dir", "export_dir"]) - - -def construct_estimator(flags_obj, params, schedule_manager): - """Construct an estimator from either Estimator or TPUEstimator. - - Args: - flags_obj: The FLAGS object parsed from command line. - params: A dict of run specific parameters. - schedule_manager: A schedule.Manager object containing the run schedule. - - Returns: - An estimator object to be used for training and eval. - """ - if not params["use_tpu"]: - distribution_strategy = distribution_utils.get_distribution_strategy( - distribution_strategy=flags_obj.distribution_strategy, - num_gpus=flags_core.get_num_gpus(flags_obj), - all_reduce_alg=flags_obj.all_reduce_alg) - return tf.estimator.Estimator( - model_fn=model_fn, model_dir=flags_obj.model_dir, params=params, - config=tf.estimator.RunConfig(train_distribute=distribution_strategy)) - - tpu_cluster_resolver = tf.compat.v1.cluster_resolver.TPUClusterResolver( - tpu=flags_obj.tpu, - zone=flags_obj.tpu_zone, - project=flags_obj.tpu_gcp_project - ) - - tpu_config = tf.estimator.tpu.TPUConfig( - iterations_per_loop=schedule_manager.single_iteration_train_steps, - num_shards=flags_obj.num_tpu_shards) - - run_config = tf.estimator.tpu.RunConfig( - cluster=tpu_cluster_resolver, - model_dir=flags_obj.model_dir, - session_config=tf.ConfigProto( - allow_soft_placement=True, log_device_placement=True), - tpu_config=tpu_config) - - return tf.estimator.tpu.TPUEstimator( - model_fn=model_fn, - use_tpu=params["use_tpu"] and flags_obj.tpu != tpu_util.LOCAL, - train_batch_size=schedule_manager.batch_size, - eval_batch_size=schedule_manager.batch_size, - params={ - # TPUEstimator needs to populate batch_size itself due to sharding. - key: value for key, value in params.items() if key != "batch_size" - }, - config=run_config) - -def per_replica_batch_size(batch_size, num_gpus): - """For multi-gpu, batch-size must be a multiple of the number of GPUs. - - - Note that distribution strategy handles this automatically when used with - Keras. For using with Estimator, we need to get per GPU batch. - - Args: - batch_size: Global batch size to be divided among devices. This should be - equal to num_gpus times the single-GPU batch_size for multi-gpu training. - num_gpus: How many GPUs are used with DistributionStrategies. - - Returns: - Batch size per device. 
- - Raises: - ValueError: if batch_size is not divisible by number of devices - """ - if num_gpus <= 1: - return batch_size - - remainder = batch_size % num_gpus - if remainder: - err = ('When running with multiple GPUs, batch size ' - 'must be a multiple of the number of available GPUs. Found {} ' - 'GPUs with a batch size of {}; try --batch_size={} instead.' - ).format(num_gpus, batch_size, batch_size - remainder) - raise ValueError(err) - return int(batch_size / num_gpus) - - -def run_transformer(flags_obj): - """Create tf.Estimator to train and evaluate transformer model. - - Args: - flags_obj: Object containing parsed flag values. - - Returns: - Dict of results of the run. Contains the keys `eval_results`, - `train_hooks`, `bleu_cased`, and `bleu_uncased`. `train_hooks` is a list the - instances of hooks used during training. - """ - num_gpus = flags_core.get_num_gpus(flags_obj) - - # Add flag-defined parameters to params object - params = PARAMS_MAP[flags_obj.param_set] - if num_gpus > 1: - if flags_obj.param_set == "big": - params = model_params.BIG_MULTI_GPU_PARAMS - elif flags_obj.param_set == "base": - params = model_params.BASE_MULTI_GPU_PARAMS - - params["data_dir"] = flags_obj.data_dir - params["model_dir"] = flags_obj.model_dir - params["num_parallel_calls"] = flags_obj.num_parallel_calls - - params["tpu"] = flags_obj.tpu - params["use_tpu"] = bool(flags_obj.tpu) # was a tpu specified. - params["static_batch"] = flags_obj.static_batch or params["use_tpu"] - params["allow_ffn_pad"] = not params["use_tpu"] - - params["max_length"] = flags_obj.max_length or params["max_length"] - - params["use_synthetic_data"] = flags_obj.use_synthetic_data - - # Set batch size parameter, which depends on the availability of - # TPU and GPU, and distribution settings. 
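A worked example of the per-replica split described above, with illustrative values:

```
#   batch_size=4096, num_gpus=8  ->  4096 % 8 == 0, so 4096 // 8 = 512 per GPU
#   batch_size=4100, num_gpus=8  ->  remainder 4, so per_replica_batch_size
#                                    raises ValueError and suggests --batch_size=4096
batch_size, num_gpus = 4096, 8
assert batch_size % num_gpus == 0
per_replica = batch_size // num_gpus   # 512
```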
- params["batch_size"] = (flags_obj.batch_size or ( - params["default_batch_size_tpu"] if params["use_tpu"] - else params["default_batch_size"])) - - total_batch_size = params["batch_size"] - if not params["use_tpu"]: - params["batch_size"] = per_replica_batch_size(params["batch_size"], - num_gpus) - - schedule_manager = schedule.Manager( - train_steps=flags_obj.train_steps, - steps_between_evals=flags_obj.steps_between_evals, - train_epochs=flags_obj.train_epochs, - epochs_between_evals=flags_obj.epochs_between_evals, - default_train_epochs=DEFAULT_TRAIN_EPOCHS, - batch_size=params["batch_size"], - max_length=params["max_length"], - use_tpu=params["use_tpu"], - num_tpu_shards=flags_obj.num_tpu_shards - ) - - params["repeat_dataset"] = schedule_manager.repeat_dataset - - model_helpers.apply_clean(flags.FLAGS) - - # Create hooks that log information about the training and metric values - train_hooks = hooks_helper.get_train_hooks( - flags_obj.hooks, - model_dir=flags_obj.model_dir, - tensors_to_log=TENSORS_TO_LOG, # used for logging hooks - batch_size=total_batch_size, # for ExamplesPerSecondHook - use_tpu=params["use_tpu"] # Not all hooks can run with TPUs - ) - benchmark_logger = logger.get_benchmark_logger() - benchmark_logger.log_run_info( - model_name="transformer", - dataset_name="wmt_translate_ende", - run_params=params, - test_id=flags_obj.benchmark_test_id) - - # Train and evaluate transformer model - estimator = construct_estimator(flags_obj, params, schedule_manager) - stats = run_loop( - estimator=estimator, - # Training arguments - schedule_manager=schedule_manager, - train_hooks=train_hooks, - benchmark_logger=benchmark_logger, - # BLEU calculation arguments - bleu_source=flags_obj.bleu_source, - bleu_ref=flags_obj.bleu_ref, - bleu_threshold=flags_obj.stop_threshold, - vocab_file=flags_obj.vocab_file) - - if flags_obj.export_dir and not params["use_tpu"]: - serving_input_fn = export.build_tensor_serving_input_receiver_fn( - shape=[None], dtype=tf.int64, batch_size=None) - # Export saved model, and save the vocab file as an extra asset. The vocab - # file is saved to allow consistent input encoding and output decoding. - # (See the "Export trained model" section in the README for an example of - # how to use the vocab file.) - # Since the model itself does not use the vocab file, this file is saved as - # an extra asset rather than a core asset. - estimator.export_savedmodel( - flags_obj.export_dir, serving_input_fn, - assets_extra={"vocab.txt": flags_obj.vocab_file}, - strip_default_attrs=True) - return stats - - -def main(_): - with logger.benchmark_context(flags.FLAGS): - run_transformer(flags.FLAGS) - - -if __name__ == "__main__": - tf.logging.set_verbosity(tf.logging.INFO) - define_transformer_flags() - absl_app.run(main) diff --git a/official/r1/transformer/translate.py b/official/r1/transformer/translate.py deleted file mode 100644 index 9912ee3c3..000000000 --- a/official/r1/transformer/translate.py +++ /dev/null @@ -1,237 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Translate text or files using trained transformer model.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import os - -# pylint: disable=g-bad-import-order -from absl import app as absl_app -from absl import flags -import tensorflow.compat.v1 as tf -# pylint: enable=g-bad-import-order - -from official.nlp.transformer.utils import tokenizer -from official.utils.flags import core as flags_core - -_DECODE_BATCH_SIZE = 32 -_EXTRA_DECODE_LENGTH = 100 -_BEAM_SIZE = 4 -_ALPHA = 0.6 - - -def _get_sorted_inputs(filename): - """Read and sort lines from the file sorted by decreasing length. - - Args: - filename: String name of file to read inputs from. - Returns: - Sorted list of inputs, and dictionary mapping original index->sorted index - of each element. - """ - with tf.io.gfile.GFile(filename) as f: - records = f.read().split("\n") - inputs = [record.strip() for record in records] - if not inputs[-1]: - inputs.pop() - - input_lens = [(i, len(line.split())) for i, line in enumerate(inputs)] - sorted_input_lens = sorted(input_lens, key=lambda x: x[1], reverse=True) - - sorted_inputs = [None] * len(sorted_input_lens) - sorted_keys = [0] * len(sorted_input_lens) - for i, (index, _) in enumerate(sorted_input_lens): - sorted_inputs[i] = inputs[index] - sorted_keys[index] = i - - return sorted_inputs, sorted_keys - - -def _encode_and_add_eos(line, subtokenizer): - """Encode line with subtokenizer, and add EOS id to the end.""" - return subtokenizer.encode(line) + [tokenizer.EOS_ID] - - -def _trim_and_decode(ids, subtokenizer): - """Trim EOS and PAD tokens from ids, and decode to return a string.""" - try: - index = list(ids).index(tokenizer.EOS_ID) - return subtokenizer.decode(ids[:index]) - except ValueError: # No EOS found in sequence - return subtokenizer.decode(ids) - - -def translate_file( - estimator, subtokenizer, input_file, output_file=None, - print_all_translations=True): - """Translate lines in file, and save to output file if specified. - - Args: - estimator: tf.Estimator used to generate the translations. - subtokenizer: Subtokenizer object for encoding and decoding source and - translated lines. - input_file: file containing lines to translate - output_file: file that stores the generated translations. - print_all_translations: If true, all translations are printed to stdout. - - Raises: - ValueError: if output file is invalid. - """ - batch_size = _DECODE_BATCH_SIZE - - # Read and sort inputs by length. Keep dictionary (original index-->new index - # in sorted list) to write translations in the original order. - sorted_inputs, sorted_keys = _get_sorted_inputs(input_file) - num_decode_batches = (len(sorted_inputs) - 1) // batch_size + 1 - - def input_generator(): - """Yield encoded strings from sorted_inputs.""" - for i, line in enumerate(sorted_inputs): - if i % batch_size == 0: - batch_num = (i // batch_size) + 1 - - tf.logging.info("Decoding batch %d out of %d." 
% - (batch_num, num_decode_batches)) - yield _encode_and_add_eos(line, subtokenizer) - - def input_fn(): - """Created batched dataset of encoded inputs.""" - ds = tf.data.Dataset.from_generator( - input_generator, tf.int64, tf.TensorShape([None])) - ds = ds.padded_batch(batch_size, [None]) - return ds - - translations = [] - for i, prediction in enumerate(estimator.predict(input_fn)): - translation = _trim_and_decode(prediction["outputs"], subtokenizer) - translations.append(translation) - - if print_all_translations: - tf.logging.info("Translating:\n\tInput: %s\n\tOutput: %s" % - (sorted_inputs[i], translation)) - - # Write translations in the order they appeared in the original file. - if output_file is not None: - if tf.io.gfile.isdir(output_file): - raise ValueError("File output is a directory, will not save outputs to " - "file.") - tf.logging.info("Writing to file %s" % output_file) - with tf.io.gfile.GFile(output_file, "w") as f: - for i in sorted_keys: - f.write("%s\n" % translations[i]) - - -def translate_text(estimator, subtokenizer, txt): - """Translate a single string.""" - encoded_txt = _encode_and_add_eos(txt, subtokenizer) - - def input_fn(): - ds = tf.data.Dataset.from_tensors(encoded_txt) - ds = ds.batch(_DECODE_BATCH_SIZE) - return ds - - predictions = estimator.predict(input_fn) - translation = next(predictions)["outputs"] - translation = _trim_and_decode(translation, subtokenizer) - tf.logging.info("Translation of \"%s\": \"%s\"" % (txt, translation)) - - -def main(unused_argv): - from official.transformer import transformer_main - - tf.logging.set_verbosity(tf.logging.INFO) - - if FLAGS.text is None and FLAGS.file is None: - tf.logging.warn("Nothing to translate. Make sure to call this script using " - "flags --text or --file.") - return - - subtokenizer = tokenizer.Subtokenizer(FLAGS.vocab_file) - - # Set up estimator and params - params = transformer_main.PARAMS_MAP[FLAGS.param_set] - params["beam_size"] = _BEAM_SIZE - params["alpha"] = _ALPHA - params["extra_decode_length"] = _EXTRA_DECODE_LENGTH - params["batch_size"] = _DECODE_BATCH_SIZE - estimator = tf.estimator.Estimator( - model_fn=transformer_main.model_fn, model_dir=FLAGS.model_dir, - params=params) - - if FLAGS.text is not None: - tf.logging.info("Translating text: %s" % FLAGS.text) - translate_text(estimator, subtokenizer, FLAGS.text) - - if FLAGS.file is not None: - input_file = os.path.abspath(FLAGS.file) - tf.logging.info("Translating file: %s" % input_file) - if not tf.gfile.Exists(FLAGS.file): - raise ValueError("File does not exist: %s" % input_file) - - output_file = None - if FLAGS.file_out is not None: - output_file = os.path.abspath(FLAGS.file_out) - tf.logging.info("File output specified: %s" % output_file) - - translate_file(estimator, subtokenizer, input_file, output_file) - - -def define_translate_flags(): - """Define flags used for translation script.""" - # Model flags - flags.DEFINE_string( - name="model_dir", short_name="md", default="/tmp/transformer_model", - help=flags_core.help_wrap( - "Directory containing Transformer model checkpoints.")) - flags.DEFINE_enum( - name="param_set", short_name="mp", default="big", - enum_values=["base", "big"], - help=flags_core.help_wrap( - "Parameter set to use when creating and training the model. The " - "parameters define the input shape (batch size and max length), " - "model configuration (size of embedding, # of hidden layers, etc.), " - "and various other settings. 
The big parameter set increases the " - "default batch size, embedding/hidden size, and filter size. For a " - "complete list of parameters, please see model/model_params.py.")) - flags.DEFINE_string( - name="vocab_file", short_name="vf", default=None, - help=flags_core.help_wrap( - "Path to subtoken vocabulary file. If data_download.py was used to " - "download and encode the training data, look in the data_dir to find " - "the vocab file.")) - flags.mark_flag_as_required("vocab_file") - - flags.DEFINE_string( - name="text", default=None, - help=flags_core.help_wrap( - "Text to translate. Output will be printed to console.")) - flags.DEFINE_string( - name="file", default=None, - help=flags_core.help_wrap( - "File containing text to translate. Translation will be printed to " - "console and, if --file_out is provided, saved to an output file.")) - flags.DEFINE_string( - name="file_out", default=None, - help=flags_core.help_wrap( - "If --file flag is specified, save translation to this file.")) - - -if __name__ == "__main__": - define_translate_flags() - FLAGS = flags.FLAGS - absl_app.run(main) diff --git a/official/r1/utils/__init__.py b/official/r1/utils/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/official/r1/utils/data/__init__.py b/official/r1/utils/data/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/official/r1/utils/data/file_io.py b/official/r1/utils/data/file_io.py deleted file mode 100644 index b7776fc9e..000000000 --- a/official/r1/utils/data/file_io.py +++ /dev/null @@ -1,207 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Convenience functions for managing dataset file buffers.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import atexit -import multiprocessing -import multiprocessing.dummy -import os -import tempfile -import uuid - -from absl import logging -import numpy as np -import six -import tensorflow as tf -# pylint:disable=logging-format-interpolation - - -class _GarbageCollector(object): - """Deletes temporary buffer files at exit. - - Certain tasks (such as NCF Recommendation) require writing buffers to - temporary files. (Which may be local or distributed.) It is not generally safe - to delete these files during operation, but they should be cleaned up. This - class keeps track of temporary files created, and deletes them at exit. 
- """ - def __init__(self): - self.temp_buffers = [] - - def register(self, filepath): - self.temp_buffers.append(filepath) - - def purge(self): - try: - for i in self.temp_buffers: - if tf.io.gfile.exists(i): - tf.io.gfile.remove(i) - logging.info("Buffer file {} removed".format(i)) - except Exception as e: - logging.error("Failed to cleanup buffer files: {}".format(e)) - - -_GARBAGE_COLLECTOR = _GarbageCollector() -atexit.register(_GARBAGE_COLLECTOR.purge) - -_ROWS_PER_CORE = 50000 - - -def write_to_temp_buffer(dataframe, buffer_folder, columns): - if buffer_folder is None: - _, buffer_path = tempfile.mkstemp() - else: - tf.io.gfile.makedirs(buffer_folder) - buffer_path = os.path.join(buffer_folder, str(uuid.uuid4())) - _GARBAGE_COLLECTOR.register(buffer_path) - - return write_to_buffer(dataframe, buffer_path, columns) - - -def iter_shard_dataframe(df, rows_per_core=1000): - """Two way shard of a dataframe. - - This function evenly shards a dataframe so that it can be mapped efficiently. - It yields a list of dataframes with length equal to the number of CPU cores, - with each dataframe having rows_per_core rows. (Except for the last batch - which may have fewer rows in the dataframes.) Passing vectorized inputs to - a pool is more effecient than iterating through a dataframe in serial and - passing a list of inputs to the pool. - - Args: - df: Pandas dataframe to be sharded. - rows_per_core: Number of rows in each shard. - - Returns: - A list of dataframe shards. - """ - n = len(df) - num_cores = min([multiprocessing.cpu_count(), n]) - - num_blocks = int(np.ceil(n / num_cores / rows_per_core)) - max_batch_size = num_cores * rows_per_core - for i in range(num_blocks): - min_index = i * max_batch_size - max_index = min([(i + 1) * max_batch_size, n]) - df_shard = df[min_index:max_index] - n_shard = len(df_shard) - boundaries = np.linspace(0, n_shard, num_cores + 1, dtype=np.int64) - yield [df_shard[boundaries[j]:boundaries[j+1]] for j in range(num_cores)] - - -def _shard_dict_to_examples(shard_dict): - """Converts a dict of arrays into a list of example bytes.""" - n = [i for i in shard_dict.values()][0].shape[0] - feature_list = [{} for _ in range(n)] - for column, values in shard_dict.items(): - if len(values.shape) == 1: - values = np.reshape(values, values.shape + (1,)) - - if values.dtype.kind == "i": - feature_map = lambda x: tf.train.Feature( - int64_list=tf.train.Int64List(value=x)) - elif values.dtype.kind == "f": - feature_map = lambda x: tf.train.Feature( - float_list=tf.train.FloatList(value=x)) - else: - raise ValueError("Invalid dtype") - for i in range(n): - feature_list[i][column] = feature_map(values[i]) - examples = [ - tf.train.Example(features=tf.train.Features(feature=example_features)) - for example_features in feature_list - ] - - return [e.SerializeToString() for e in examples] - - -def _serialize_shards(df_shards, columns, pool, writer): - """Map sharded dataframes to bytes, and write them to a buffer. - - Args: - df_shards: A list of pandas dataframes. (Should be of similar size) - columns: The dataframe columns to be serialized. - pool: A pool to serialize in parallel. - writer: A TFRecordWriter to write the serialized shards. - """ - # Pandas does not store columns of arrays as nd arrays. stack remedies this. - map_inputs = [{c: np.stack(shard[c].values, axis=0) for c in columns} - for shard in df_shards] - - # Failure within pools is very irksome. Thus, it is better to thoroughly check - # inputs in the main process. 
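The two-way sharding in iter_shard_dataframe above is easiest to see on a toy frame. A minimal sketch, assuming the process reports four CPU cores (the unit tests further below pin this with a fixed_core_count helper); the data values are illustrative:

```
import numpy as np
import pandas as pd

df = pd.DataFrame({"x": np.arange(10)})

# With 4 cores and rows_per_core=4, all ten rows fit into one block that is
# split into four roughly equal per-core shards:
#   [[0, 1], [2, 3, 4], [5, 6], [7, 8, 9]]
for block in iter_shard_dataframe(df, rows_per_core=4):
  print([shard["x"].tolist() for shard in block])
```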
- for inp in map_inputs: - # Check that all fields have the same number of rows. - assert len(set([v.shape[0] for v in inp.values()])) == 1 - for val in inp.values(): - assert hasattr(val, "dtype") - assert hasattr(val.dtype, "kind") - assert val.dtype.kind in ("i", "f") - assert len(val.shape) in (1, 2) - shard_bytes = pool.map(_shard_dict_to_examples, map_inputs) - for s in shard_bytes: - for example in s: - writer.write(example) - - -def write_to_buffer(dataframe, buffer_path, columns, expected_size=None): - """Write a dataframe to a binary file for a dataset to consume. - - Args: - dataframe: The pandas dataframe to be serialized. - buffer_path: The path where the serialized results will be written. - columns: The dataframe columns to be serialized. - expected_size: The size in bytes of the serialized results. This is used to - lazily construct the buffer. - - Returns: - The path of the buffer. - """ - if (tf.io.gfile.exists(buffer_path) and - tf.io.gfile.stat(buffer_path).length > 0): - actual_size = tf.io.gfile.stat(buffer_path).length - if expected_size == actual_size: - return buffer_path - logging.warning( - "Existing buffer {} has size {}. Expected size {}. Deleting and " - "rebuilding buffer.".format(buffer_path, actual_size, expected_size)) - tf.io.gfile.remove(buffer_path) - - if dataframe is None: - raise ValueError( - "dataframe was None but a valid existing buffer was not found.") - - tf.io.gfile.makedirs(os.path.split(buffer_path)[0]) - - logging.info("Constructing TFRecordDataset buffer: {}".format(buffer_path)) - - count = 0 - pool = multiprocessing.dummy.Pool(multiprocessing.cpu_count()) - try: - with tf.io.TFRecordWriter(buffer_path) as writer: - for df_shards in iter_shard_dataframe(df=dataframe, - rows_per_core=_ROWS_PER_CORE): - _serialize_shards(df_shards, columns, pool, writer) - count += sum([len(s) for s in df_shards]) - logging.info("{}/{} examples written.".format( - str(count).ljust(8), len(dataframe))) - finally: - pool.terminate() - - logging.info("Buffer write complete.") - return buffer_path diff --git a/official/r1/utils/data/file_io_test.py b/official/r1/utils/data/file_io_test.py deleted file mode 100644 index 529cb4591..000000000 --- a/official/r1/utils/data/file_io_test.py +++ /dev/null @@ -1,197 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================== -"""Tests for binary data file utilities.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import contextlib -import multiprocessing - -# pylint: disable=wrong-import-order -import numpy as np -import pandas as pd -import tensorflow as tf -# pylint: enable=wrong-import-order - -from official.r1.utils.data import file_io - - -_RAW_ROW = "raw_row" -_DUMMY_COL = "column_0" -_DUMMY_VEC_COL = "column_1" -_DUMMY_VEC_LEN = 4 - -_ROWS_PER_CORE = 4 -_TEST_CASES = [ - # One batch of one - dict(row_count=1, cpu_count=1, expected=[ - [[0]] - ]), - - dict(row_count=10, cpu_count=1, expected=[ - [[0, 1, 2, 3]], [[4, 5, 6, 7]], [[8, 9]] - ]), - - dict(row_count=21, cpu_count=1, expected=[ - [[0, 1, 2, 3]], [[4, 5, 6, 7]], [[8, 9, 10, 11]], - [[12, 13, 14, 15]], [[16, 17, 18, 19]], [[20]] - ]), - - dict(row_count=1, cpu_count=4, expected=[ - [[0]] - ]), - - dict(row_count=10, cpu_count=4, expected=[ - [[0, 1], [2, 3, 4], [5, 6], [7, 8, 9]] - ]), - - dict(row_count=21, cpu_count=4, expected=[ - [[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15]], - [[16], [17], [18], [19, 20]] - ]), - - dict(row_count=10, cpu_count=8, expected=[ - [[0], [1], [2], [3, 4], [5], [6], [7], [8, 9]] - ]), - - dict(row_count=40, cpu_count=8, expected=[ - [[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15], - [16, 17, 18, 19], [20, 21, 22, 23], [24, 25, 26, 27], - [28, 29, 30, 31]], - [[32], [33], [34], [35], [36], [37], [38], [39]] - ]), -] - -_FEATURE_MAP = { - _RAW_ROW: tf.io.FixedLenFeature([1], dtype=tf.int64), - _DUMMY_COL: tf.io.FixedLenFeature([1], dtype=tf.int64), - _DUMMY_VEC_COL: tf.io.FixedLenFeature([_DUMMY_VEC_LEN], dtype=tf.float32) -} - - -@contextlib.contextmanager -def fixed_core_count(cpu_count): - """Override CPU count. - - file_io.py uses the cpu_count function to scale to the size of the instance. - However, this is not desirable for testing because it can make the test flaky. - Instead, this context manager fixes the count for more robust testing. - - Args: - cpu_count: How many cores multiprocessing claims to have. - - Yields: - Nothing. 
(for context manager only) - """ - old_count_fn = multiprocessing.cpu_count - multiprocessing.cpu_count = lambda: cpu_count - yield - multiprocessing.cpu_count = old_count_fn - - -class BaseTest(tf.test.TestCase): - - def setUp(self): - super(BaseTest, self).setUp() - tf.compat.v1.disable_eager_execution() - - def _test_sharding(self, row_count, cpu_count, expected): - df = pd.DataFrame({_DUMMY_COL: list(range(row_count))}) - with fixed_core_count(cpu_count): - shards = list(file_io.iter_shard_dataframe(df, _ROWS_PER_CORE)) - result = [[j[_DUMMY_COL].tolist() for j in i] for i in shards] - self.assertAllEqual(expected, result) - - def test_tiny_rows_low_core(self): - self._test_sharding(**_TEST_CASES[0]) - - def test_small_rows_low_core(self): - self._test_sharding(**_TEST_CASES[1]) - - def test_large_rows_low_core(self): - self._test_sharding(**_TEST_CASES[2]) - - def test_tiny_rows_medium_core(self): - self._test_sharding(**_TEST_CASES[3]) - - def test_small_rows_medium_core(self): - self._test_sharding(**_TEST_CASES[4]) - - def test_large_rows_medium_core(self): - self._test_sharding(**_TEST_CASES[5]) - - def test_small_rows_large_core(self): - self._test_sharding(**_TEST_CASES[6]) - - def test_large_rows_large_core(self): - self._test_sharding(**_TEST_CASES[7]) - - def _serialize_deserialize(self, num_cores=1, num_rows=20): - np.random.seed(1) - df = pd.DataFrame({ - # Serialization order is only deterministic for num_cores=1. raw_row is - # used in validation after the deserialization. - _RAW_ROW: np.array(range(num_rows), dtype=np.int64), - _DUMMY_COL: np.random.randint(0, 35, size=(num_rows,)), - _DUMMY_VEC_COL: [ - np.array([np.random.random() for _ in range(_DUMMY_VEC_LEN)]) - for i in range(num_rows) # pylint: disable=unused-variable - ] - }) - - with fixed_core_count(num_cores): - buffer_path = file_io.write_to_temp_buffer( - df, self.get_temp_dir(), [_RAW_ROW, _DUMMY_COL, _DUMMY_VEC_COL]) - - with self.session(graph=tf.Graph()) as sess: - dataset = tf.data.TFRecordDataset(buffer_path) - dataset = dataset.batch(1).map( - lambda x: tf.io.parse_example(serialized=x, features=_FEATURE_MAP)) - - data_iter = tf.compat.v1.data.make_one_shot_iterator(dataset) - seen_rows = set() - for i in range(num_rows+5): - row = data_iter.get_next() - try: - row_id, val_0, val_1 = sess.run( - [row[_RAW_ROW], row[_DUMMY_COL], row[_DUMMY_VEC_COL]]) - row_id, val_0, val_1 = row_id[0][0], val_0[0][0], val_1[0] - assert row_id not in seen_rows - seen_rows.add(row_id) - - self.assertEqual(val_0, df[_DUMMY_COL][row_id]) - self.assertAllClose(val_1, df[_DUMMY_VEC_COL][row_id]) - - self.assertLess(i, num_rows, msg="Too many rows.") - except tf.errors.OutOfRangeError: - self.assertGreaterEqual(i, num_rows, msg="Too few rows.") - - file_io._GARBAGE_COLLECTOR.purge() - assert not tf.io.gfile.exists(buffer_path) - - def test_serialize_deserialize_0(self): - self._serialize_deserialize(num_cores=1) - - def test_serialize_deserialize_1(self): - self._serialize_deserialize(num_cores=2) - - def test_serialize_deserialize_2(self): - self._serialize_deserialize(num_cores=8) - - -if __name__ == "__main__": - tf.test.main() diff --git a/official/r1/utils/export.py b/official/r1/utils/export.py deleted file mode 100644 index 8061c2881..000000000 --- a/official/r1/utils/export.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Convenience functions for exporting models as SavedModels or other types.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import tensorflow as tf - - -def build_tensor_serving_input_receiver_fn(shape, dtype=tf.float32, - batch_size=1): - """Returns a input_receiver_fn that can be used during serving. - - This expects examples to come through as float tensors, and simply - wraps them as TensorServingInputReceivers. - - Arguably, this should live in tf.estimator.export. Testing here first. - - Args: - shape: list representing target size of a single example. - dtype: the expected datatype for the input example - batch_size: number of input tensors that will be passed for prediction - - Returns: - A function that itself returns a TensorServingInputReceiver. - """ - def serving_input_receiver_fn(): - # Prep a placeholder where the input example will be fed in - features = tf.compat.v1.placeholder( - dtype=dtype, shape=[batch_size] + shape, name='input_tensor') - - return tf.estimator.export.TensorServingInputReceiver( - features=features, receiver_tensors=features) - - return serving_input_receiver_fn diff --git a/official/r1/utils/export_test.py b/official/r1/utils/export_test.py deleted file mode 100644 index 3785edd47..000000000 --- a/official/r1/utils/export_test.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Tests for exporting utils.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import tensorflow as tf # pylint: disable=g-bad-import-order - -from official.r1.utils import export - - -class ExportUtilsTest(tf.test.TestCase): - """Tests for the ExportUtils.""" - - def test_build_tensor_serving_input_receiver_fn(self): - receiver_fn = export.build_tensor_serving_input_receiver_fn(shape=[4, 5]) - with tf.Graph().as_default(): - receiver = receiver_fn() - self.assertIsInstance( - receiver, tf.estimator.export.TensorServingInputReceiver) - - self.assertIsInstance(receiver.features, tf.Tensor) - self.assertEqual(receiver.features.shape, tf.TensorShape([1, 4, 5])) - self.assertEqual(receiver.features.dtype, tf.float32) - self.assertIsInstance(receiver.receiver_tensors, dict) - # Note that Python 3 can no longer index .values() directly; cast to list. 
- self.assertEqual(list(receiver.receiver_tensors.values())[0].shape, - tf.TensorShape([1, 4, 5])) - - def test_build_tensor_serving_input_receiver_fn_batch_dtype(self): - receiver_fn = export.build_tensor_serving_input_receiver_fn( - shape=[4, 5], dtype=tf.int8, batch_size=10) - - with tf.Graph().as_default(): - receiver = receiver_fn() - self.assertIsInstance( - receiver, tf.estimator.export.TensorServingInputReceiver) - - self.assertIsInstance(receiver.features, tf.Tensor) - self.assertEqual(receiver.features.shape, tf.TensorShape([10, 4, 5])) - self.assertEqual(receiver.features.dtype, tf.int8) - self.assertIsInstance(receiver.receiver_tensors, dict) - # Note that Python 3 can no longer index .values() directly; cast to list. - self.assertEqual(list(receiver.receiver_tensors.values())[0].shape, - tf.TensorShape([10, 4, 5])) - - -if __name__ == "__main__": - tf.test.main() diff --git a/official/r1/utils/logs/__init__.py b/official/r1/utils/logs/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/official/r1/utils/logs/cloud_lib.py b/official/r1/utils/logs/cloud_lib.py deleted file mode 100644 index a2d9bd3db..000000000 --- a/official/r1/utils/logs/cloud_lib.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -"""Utilities that interact with cloud service. -""" - -import requests - -GCP_METADATA_URL = "http://metadata/computeMetadata/v1/instance/hostname" -GCP_METADATA_HEADER = {"Metadata-Flavor": "Google"} - - -def on_gcp(): - """Detect whether the current running environment is on GCP.""" - try: - # Timeout in 5 seconds, in case the test environment has connectivity issue. - # There is not default timeout, which means it might block forever. - response = requests.get( - GCP_METADATA_URL, headers=GCP_METADATA_HEADER, timeout=5) - return response.status_code == 200 - except requests.exceptions.RequestException: - return False diff --git a/official/r1/utils/logs/cloud_lib_test.py b/official/r1/utils/logs/cloud_lib_test.py deleted file mode 100644 index 15cdc3c66..000000000 --- a/official/r1/utils/logs/cloud_lib_test.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================== - -"""Tests for cloud_lib.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import unittest - -import mock -import requests - -from official.r1.utils.logs import cloud_lib - - -class CloudLibTest(unittest.TestCase): - - @mock.patch("requests.get") - def test_on_gcp(self, mock_requests_get): - mock_response = mock.MagicMock() - mock_requests_get.return_value = mock_response - mock_response.status_code = 200 - - self.assertEqual(cloud_lib.on_gcp(), True) - - @mock.patch("requests.get") - def test_not_on_gcp(self, mock_requests_get): - mock_requests_get.side_effect = requests.exceptions.ConnectionError() - - self.assertEqual(cloud_lib.on_gcp(), False) - - -if __name__ == "__main__": - unittest.main() diff --git a/official/r1/utils/logs/guidelines.md b/official/r1/utils/logs/guidelines.md deleted file mode 100644 index 408c3cd58..000000000 --- a/official/r1/utils/logs/guidelines.md +++ /dev/null @@ -1,58 +0,0 @@ -# Logging in official models - -This library adds logging functions that print or save tensor values. Official models should define all common hooks -(using hooks helper) and a benchmark logger. - -1. **Training Hooks** - - Hooks are a TensorFlow concept that define specific actions at certain points of the execution. We use them to obtain and log - tensor values during training. - - hooks_helper.py provides an easy way to create common hooks. The following hooks are currently defined: - * LoggingTensorHook: Logs tensor values - * ProfilerHook: Writes a timeline json that can be loaded into chrome://tracing. - * ExamplesPerSecondHook: Logs the number of examples processed per second. - * LoggingMetricHook: Similar to LoggingTensorHook, except that the tensors are logged in a format defined by our data - anaylsis pipeline. - - -2. **Benchmarks** - - The benchmark logger provides useful functions for logging environment information, and evaluation results. - The module also contains a context which is used to update the status of the run. - -Example usage: - -``` -from absl import app as absl_app - -from official.utils.logs import hooks_helper -from official.utils.logs import logger - -def model_main(flags_obj): - estimator = ... - - benchmark_logger = logger.get_benchmark_logger() - benchmark_logger.log_run_info(...) - - train_hooks = hooks_helper.get_train_hooks(...) - - for epoch in range(10): - estimator.train(..., hooks=train_hooks) - eval_results = estimator.evaluate(...) - - # Log a dictionary of metrics - benchmark_logger.log_evaluation_result(eval_results) - - # Log an individual metric - benchmark_logger.log_metric(...) - - -def main(_): - with logger.benchmark_context(flags.FLAGS): - model_main(flags.FLAGS) - -if __name__ == "__main__": - # define flags - absl_app.run(main) -``` diff --git a/official/r1/utils/logs/hooks.py b/official/r1/utils/logs/hooks.py deleted file mode 100644 index c595eabb3..000000000 --- a/official/r1/utils/logs/hooks.py +++ /dev/null @@ -1,130 +0,0 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -"""Hook that counts examples per second every N steps or seconds.""" - - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import tensorflow as tf # pylint: disable=g-bad-import-order - -from official.r1.utils.logs import logger - - -class ExamplesPerSecondHook(tf.estimator.SessionRunHook): - """Hook to print out examples per second. - - Total time is tracked and then divided by the total number of steps - to get the average step time and then batch_size is used to determine - the running average of examples per second. The examples per second for the - most recent interval is also logged. - """ - - def __init__(self, - batch_size, - every_n_steps=None, - every_n_secs=None, - warm_steps=0, - metric_logger=None): - """Initializer for ExamplesPerSecondHook. - - Args: - batch_size: Total batch size across all workers used to calculate - examples/second from global time. - every_n_steps: Log stats every n steps. - every_n_secs: Log stats every n seconds. Exactly one of the - `every_n_steps` or `every_n_secs` should be set. - warm_steps: The number of steps to be skipped before logging and running - average calculation. warm_steps steps refers to global steps across all - workers, not on each worker - metric_logger: instance of `BenchmarkLogger`, the benchmark logger that - hook should use to write the log. If None, BaseBenchmarkLogger will - be used. - - Raises: - ValueError: if neither `every_n_steps` or `every_n_secs` is set, or - both are set. - """ - - if (every_n_steps is None) == (every_n_secs is None): - raise ValueError("exactly one of every_n_steps" - " and every_n_secs should be provided.") - - self._logger = metric_logger or logger.BaseBenchmarkLogger() - - self._timer = tf.estimator.SecondOrStepTimer( - every_steps=every_n_steps, every_secs=every_n_secs) - - self._step_train_time = 0 - self._total_steps = 0 - self._batch_size = batch_size - self._warm_steps = warm_steps - # List of examples per second logged every_n_steps. - self.current_examples_per_sec_list = [] - - def begin(self): - """Called once before using the session to check global step.""" - self._global_step_tensor = tf.compat.v1.train.get_global_step() - if self._global_step_tensor is None: - raise RuntimeError( - "Global step should be created to use StepCounterHook.") - - def before_run(self, run_context): # pylint: disable=unused-argument - """Called before each call to run(). - - Args: - run_context: A SessionRunContext object. - - Returns: - A SessionRunArgs object or None if never triggered. - """ - return tf.estimator.SessionRunArgs(self._global_step_tensor) - - def after_run(self, run_context, run_values): # pylint: disable=unused-argument - """Called after each call to run(). - - Args: - run_context: A SessionRunContext object. - run_values: A SessionRunValues object. 
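A small worked example of the two throughput figures the hook reports; the step counts and timings below are illustrative only:

```
batch_size = 256

# Cumulative since warm-up: 100 steps in 20 seconds.
total_steps, total_time = 100, 20.0
average_examples_per_sec = batch_size * (total_steps / total_time)      # 1280.0

# Most recent trigger interval: 10 steps in 1.6 seconds.
elapsed_steps, elapsed_time = 10, 1.6
current_examples_per_sec = batch_size * (elapsed_steps / elapsed_time)  # 1600.0
```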
- """ - global_step = run_values.results - - if self._timer.should_trigger_for_step( - global_step) and global_step > self._warm_steps: - elapsed_time, elapsed_steps = self._timer.update_last_triggered_step( - global_step) - if elapsed_time is not None: - self._step_train_time += elapsed_time - self._total_steps += elapsed_steps - - # average examples per second is based on the total (accumulative) - # training steps and training time so far - average_examples_per_sec = self._batch_size * ( - self._total_steps / self._step_train_time) - # current examples per second is based on the elapsed training steps - # and training time per batch - current_examples_per_sec = self._batch_size * ( - elapsed_steps / elapsed_time) - # Logs entries to be read from hook during or after run. - self.current_examples_per_sec_list.append(current_examples_per_sec) - self._logger.log_metric( - "average_examples_per_sec", average_examples_per_sec, - global_step=global_step) - - self._logger.log_metric( - "current_examples_per_sec", current_examples_per_sec, - global_step=global_step) diff --git a/official/r1/utils/logs/hooks_helper.py b/official/r1/utils/logs/hooks_helper.py deleted file mode 100644 index 1e823241b..000000000 --- a/official/r1/utils/logs/hooks_helper.py +++ /dev/null @@ -1,173 +0,0 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -"""Hooks helper to return a list of TensorFlow hooks for training by name. - -More hooks can be added to this set. To add a new hook, 1) add the new hook to -the registry in HOOKS, 2) add a corresponding function that parses out necessary -parameters. -""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import tensorflow as tf # pylint: disable=g-bad-import-order -from absl import logging - -from official.r1.utils.logs import hooks -from official.r1.utils.logs import logger -from official.r1.utils.logs import metric_hook - -_TENSORS_TO_LOG = dict((x, x) for x in ['learning_rate', - 'cross_entropy', - 'train_accuracy']) - - -def get_train_hooks(name_list, use_tpu=False, **kwargs): - """Factory for getting a list of TensorFlow hooks for training by name. - - Args: - name_list: a list of strings to name desired hook classes. Allowed: - LoggingTensorHook, ProfilerHook, ExamplesPerSecondHook, which are defined - as keys in HOOKS - use_tpu: Boolean of whether computation occurs on a TPU. This will disable - hooks altogether. - **kwargs: a dictionary of arguments to the hooks. - - Returns: - list of instantiated hooks, ready to be used in a classifier.train call. - - Raises: - ValueError: if an unrecognized name is passed. - """ - - if not name_list: - return [] - - if use_tpu: - logging.warning( - 'hooks_helper received name_list `%s`, but a ' - 'TPU is specified. 
No hooks will be used.', name_list) - return [] - - train_hooks = [] - for name in name_list: - hook_name = HOOKS.get(name.strip().lower()) - if hook_name is None: - raise ValueError('Unrecognized training hook requested: {}'.format(name)) - else: - train_hooks.append(hook_name(**kwargs)) - - return train_hooks - - -def get_logging_tensor_hook(every_n_iter=100, tensors_to_log=None, **kwargs): # pylint: disable=unused-argument - """Function to get LoggingTensorHook. - - Args: - every_n_iter: `int`, print the values of `tensors` once every N local - steps taken on the current worker. - tensors_to_log: List of tensor names or dictionary mapping labels to tensor - names. If not set, log _TENSORS_TO_LOG by default. - **kwargs: a dictionary of arguments to LoggingTensorHook. - - Returns: - Returns a LoggingTensorHook with a standard set of tensors that will be - printed to stdout. - """ - if tensors_to_log is None: - tensors_to_log = _TENSORS_TO_LOG - - return tf.estimator.LoggingTensorHook( - tensors=tensors_to_log, - every_n_iter=every_n_iter) - - -def get_profiler_hook(model_dir, save_steps=1000, **kwargs): # pylint: disable=unused-argument - """Function to get ProfilerHook. - - Args: - model_dir: The directory to save the profile traces to. - save_steps: `int`, print profile traces every N steps. - **kwargs: a dictionary of arguments to ProfilerHook. - - Returns: - Returns a ProfilerHook that writes out timelines that can be loaded into - profiling tools like chrome://tracing. - """ - return tf.estimator.ProfilerHook(save_steps=save_steps, output_dir=model_dir) - - -def get_examples_per_second_hook(every_n_steps=100, - batch_size=128, - warm_steps=5, - **kwargs): # pylint: disable=unused-argument - """Function to get ExamplesPerSecondHook. - - Args: - every_n_steps: `int`, print current and average examples per second every - N steps. - batch_size: `int`, total batch size used to calculate examples/second from - global time. - warm_steps: skip this number of steps before logging and running average. - **kwargs: a dictionary of arguments to ExamplesPerSecondHook. - - Returns: - Returns a ProfilerHook that writes out timelines that can be loaded into - profiling tools like chrome://tracing. - """ - return hooks.ExamplesPerSecondHook( - batch_size=batch_size, every_n_steps=every_n_steps, - warm_steps=warm_steps, metric_logger=logger.get_benchmark_logger()) - - -def get_logging_metric_hook(tensors_to_log=None, - every_n_secs=600, - **kwargs): # pylint: disable=unused-argument - """Function to get LoggingMetricHook. - - Args: - tensors_to_log: List of tensor names or dictionary mapping labels to tensor - names. If not set, log _TENSORS_TO_LOG by default. - every_n_secs: `int`, the frequency for logging the metric. Default to every - 10 mins. - **kwargs: a dictionary of arguments. - - Returns: - Returns a LoggingMetricHook that saves tensor values in a JSON format. 
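A minimal sketch of wiring these helpers into an Estimator training call; the hook names come from the HOOKS registry defined below, and the model_dir and batch_size values are illustrative:

```
train_hooks = get_train_hooks(
    ["loggingtensorhook", "examplespersecondhook"],
    model_dir="/tmp/transformer_model",  # illustrative path
    batch_size=4096)
# estimator.train(input_fn=train_input_fn, hooks=train_hooks)
```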
- """ - if tensors_to_log is None: - tensors_to_log = _TENSORS_TO_LOG - return metric_hook.LoggingMetricHook( - tensors=tensors_to_log, - metric_logger=logger.get_benchmark_logger(), - every_n_secs=every_n_secs) - - -def get_step_counter_hook(**kwargs): - """Function to get StepCounterHook.""" - del kwargs - return tf.estimator.StepCounterHook() - - -# A dictionary to map one hook name and its corresponding function -HOOKS = { - 'loggingtensorhook': get_logging_tensor_hook, - 'profilerhook': get_profiler_hook, - 'examplespersecondhook': get_examples_per_second_hook, - 'loggingmetrichook': get_logging_metric_hook, - 'stepcounterhook': get_step_counter_hook -} diff --git a/official/r1/utils/logs/hooks_test.py b/official/r1/utils/logs/hooks_test.py deleted file mode 100644 index 5ce4c5e53..000000000 --- a/official/r1/utils/logs/hooks_test.py +++ /dev/null @@ -1,159 +0,0 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -"""Tests for hooks.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import time - -from absl import logging -import tensorflow as tf # pylint: disable=g-bad-import-order - -from official.r1.utils.logs import hooks -from official.r1.utils.logs import mock_lib - -logging.set_verbosity(logging.DEBUG) - - -class ExamplesPerSecondHookTest(tf.test.TestCase): - """Tests for the ExamplesPerSecondHook. - - In the test, we explicitly run global_step tensor after train_op in order to - keep the global_step value and the train_op (which increase the glboal_step - by 1) consistent. This is to correct the discrepancies in reported global_step - value when running on GPUs. 
- """ - - def setUp(self): - """Mock out logging calls to verify if correct info is being monitored.""" - self._logger = mock_lib.MockBenchmarkLogger() - - self.graph = tf.Graph() - with self.graph.as_default(): - tf.compat.v1.train.create_global_step() - self.train_op = tf.compat.v1.assign_add( - tf.compat.v1.train.get_global_step(), 1) - self.global_step = tf.compat.v1.train.get_global_step() - - def test_raise_in_both_secs_and_steps(self): - with self.assertRaises(ValueError): - hooks.ExamplesPerSecondHook( - batch_size=256, - every_n_steps=10, - every_n_secs=20, - metric_logger=self._logger) - - def test_raise_in_none_secs_and_steps(self): - with self.assertRaises(ValueError): - hooks.ExamplesPerSecondHook( - batch_size=256, - every_n_steps=None, - every_n_secs=None, - metric_logger=self._logger) - - def _validate_log_every_n_steps(self, every_n_steps, warm_steps): - hook = hooks.ExamplesPerSecondHook( - batch_size=256, - every_n_steps=every_n_steps, - warm_steps=warm_steps, - metric_logger=self._logger) - - with tf.compat.v1.train.MonitoredSession( - tf.compat.v1.train.ChiefSessionCreator(), [hook]) as mon_sess: - for _ in range(every_n_steps): - # Explicitly run global_step after train_op to get the accurate - # global_step value - mon_sess.run(self.train_op) - mon_sess.run(self.global_step) - # Nothing should be in the list yet - self.assertFalse(self._logger.logged_metric) - - mon_sess.run(self.train_op) - global_step_val = mon_sess.run(self.global_step) - - if global_step_val > warm_steps: - self._assert_metrics() - else: - # Nothing should be in the list yet - self.assertFalse(self._logger.logged_metric) - - # Add additional run to verify proper reset when called multiple times. - prev_log_len = len(self._logger.logged_metric) - mon_sess.run(self.train_op) - global_step_val = mon_sess.run(self.global_step) - - if every_n_steps == 1 and global_step_val > warm_steps: - # Each time, we log two additional metrics. Did exactly 2 get added? - self.assertEqual(len(self._logger.logged_metric), prev_log_len + 2) - else: - # No change in the size of the metric list. 
- self.assertEqual(len(self._logger.logged_metric), prev_log_len) - - def test_examples_per_sec_every_1_steps(self): - with self.graph.as_default(): - self._validate_log_every_n_steps(1, 0) - - def test_examples_per_sec_every_5_steps(self): - with self.graph.as_default(): - self._validate_log_every_n_steps(5, 0) - - def test_examples_per_sec_every_1_steps_with_warm_steps(self): - with self.graph.as_default(): - self._validate_log_every_n_steps(1, 10) - - def test_examples_per_sec_every_5_steps_with_warm_steps(self): - with self.graph.as_default(): - self._validate_log_every_n_steps(5, 10) - - def _validate_log_every_n_secs(self, every_n_secs): - hook = hooks.ExamplesPerSecondHook( - batch_size=256, - every_n_steps=None, - every_n_secs=every_n_secs, - metric_logger=self._logger) - - with tf.compat.v1.train.MonitoredSession( - tf.compat.v1.train.ChiefSessionCreator(), [hook]) as mon_sess: - # Explicitly run global_step after train_op to get the accurate - # global_step value - mon_sess.run(self.train_op) - mon_sess.run(self.global_step) - # Nothing should be in the list yet - self.assertFalse(self._logger.logged_metric) - time.sleep(every_n_secs) - - mon_sess.run(self.train_op) - mon_sess.run(self.global_step) - self._assert_metrics() - - def test_examples_per_sec_every_1_secs(self): - with self.graph.as_default(): - self._validate_log_every_n_secs(1) - - def test_examples_per_sec_every_5_secs(self): - with self.graph.as_default(): - self._validate_log_every_n_secs(5) - - def _assert_metrics(self): - metrics = self._logger.logged_metric - self.assertEqual(metrics[-2]["name"], "average_examples_per_sec") - self.assertEqual(metrics[-1]["name"], "current_examples_per_sec") - - -if __name__ == "__main__": - tf.test.main() diff --git a/official/r1/utils/logs/logger.py b/official/r1/utils/logs/logger.py deleted file mode 100644 index 587053e98..000000000 --- a/official/r1/utils/logs/logger.py +++ /dev/null @@ -1,305 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -"""Logging utilities for benchmark. - -For collecting local environment metrics like CPU and memory, certain python -packages need be installed. See README for details. 
-""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import contextlib -import datetime -import json -import numbers -import os -import threading -import uuid - -from absl import flags -from absl import logging -from six.moves import _thread as thread -import tensorflow as tf -from tensorflow.python.client import device_lib -from official.r1.utils.logs import cloud_lib - -METRIC_LOG_FILE_NAME = "metric.log" -BENCHMARK_RUN_LOG_FILE_NAME = "benchmark_run.log" -_DATE_TIME_FORMAT_PATTERN = "%Y-%m-%dT%H:%M:%S.%fZ" -GCP_TEST_ENV = "GCP" -RUN_STATUS_SUCCESS = "success" -RUN_STATUS_FAILURE = "failure" -RUN_STATUS_RUNNING = "running" - - -FLAGS = flags.FLAGS - -# Don't use it directly. Use get_benchmark_logger to access a logger. -_benchmark_logger = None -_logger_lock = threading.Lock() - - -def config_benchmark_logger(flag_obj=None): - """Config the global benchmark logger.""" - _logger_lock.acquire() - try: - global _benchmark_logger - if not flag_obj: - flag_obj = FLAGS - - if (not hasattr(flag_obj, "benchmark_logger_type") or - flag_obj.benchmark_logger_type == "BaseBenchmarkLogger"): - _benchmark_logger = BaseBenchmarkLogger() - elif flag_obj.benchmark_logger_type == "BenchmarkFileLogger": - _benchmark_logger = BenchmarkFileLogger(flag_obj.benchmark_log_dir) - else: - raise ValueError("Unrecognized benchmark_logger_type: %s" - % flag_obj.benchmark_logger_type) - - finally: - _logger_lock.release() - return _benchmark_logger - - -def get_benchmark_logger(): - if not _benchmark_logger: - config_benchmark_logger() - return _benchmark_logger - - -@contextlib.contextmanager -def benchmark_context(flag_obj): - """Context of benchmark, which will update status of the run accordingly.""" - benchmark_logger = config_benchmark_logger(flag_obj) - try: - yield - benchmark_logger.on_finish(RUN_STATUS_SUCCESS) - except Exception: # pylint: disable=broad-except - # Catch all the exception, update the run status to be failure, and re-raise - benchmark_logger.on_finish(RUN_STATUS_FAILURE) - raise - - -class BaseBenchmarkLogger(object): - """Class to log the benchmark information to STDOUT.""" - - def log_evaluation_result(self, eval_results): - """Log the evaluation result. - - The evaluate result is a dictionary that contains metrics defined in - model_fn. It also contains a entry for global_step which contains the value - of the global step when evaluation was performed. - - Args: - eval_results: dict, the result of evaluate. - """ - if not isinstance(eval_results, dict): - logging.warning("eval_results should be dictionary for logging. Got %s", - type(eval_results)) - return - global_step = eval_results[tf.compat.v1.GraphKeys.GLOBAL_STEP] - for key in sorted(eval_results): - if key != tf.compat.v1.GraphKeys.GLOBAL_STEP: - self.log_metric(key, eval_results[key], global_step=global_step) - - def log_metric(self, name, value, unit=None, global_step=None, extras=None): - """Log the benchmark metric information to local file. - - Currently the logging is done in a synchronized way. This should be updated - to log asynchronously. - - Args: - name: string, the name of the metric to log. - value: number, the value of the metric. The value will not be logged if it - is not a number type. - unit: string, the unit of the metric, E.g "image per second". - global_step: int, the global_step when the metric is logged. - extras: map of string:string, the extra information about the metric. 
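A minimal sketch of the intended call pattern for these logger entry points; the metric names, values, and global step are illustrative only:

```
benchmark_logger = get_benchmark_logger()
benchmark_logger.log_metric("accuracy", 0.91, global_step=1000)
benchmark_logger.log_evaluation_result({
    tf.compat.v1.GraphKeys.GLOBAL_STEP: 1000,
    "loss": 1.23,
    "accuracy": 0.91,
})
```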
- """ - metric = _process_metric_to_json(name, value, unit, global_step, extras) - if metric: - logging.info("Benchmark metric: %s", metric) - - def log_run_info(self, model_name, dataset_name, run_params, test_id=None): - logging.info( - "Benchmark run: %s", - _gather_run_info(model_name, dataset_name, run_params, test_id)) - - def on_finish(self, status): - pass - - -class BenchmarkFileLogger(BaseBenchmarkLogger): - """Class to log the benchmark information to local disk.""" - - def __init__(self, logging_dir): - super(BenchmarkFileLogger, self).__init__() - self._logging_dir = logging_dir - if not tf.io.gfile.isdir(self._logging_dir): - tf.io.gfile.makedirs(self._logging_dir) - self._metric_file_handler = tf.io.gfile.GFile( - os.path.join(self._logging_dir, METRIC_LOG_FILE_NAME), "a") - - def log_metric(self, name, value, unit=None, global_step=None, extras=None): - """Log the benchmark metric information to local file. - - Currently the logging is done in a synchronized way. This should be updated - to log asynchronously. - - Args: - name: string, the name of the metric to log. - value: number, the value of the metric. The value will not be logged if it - is not a number type. - unit: string, the unit of the metric, E.g "image per second". - global_step: int, the global_step when the metric is logged. - extras: map of string:string, the extra information about the metric. - """ - metric = _process_metric_to_json(name, value, unit, global_step, extras) - if metric: - try: - json.dump(metric, self._metric_file_handler) - self._metric_file_handler.write("\n") - self._metric_file_handler.flush() - except (TypeError, ValueError) as e: - logging.warning( - "Failed to dump metric to log file: name %s, value %s, error %s", - name, value, e) - - def log_run_info(self, model_name, dataset_name, run_params, test_id=None): - """Collect most of the TF runtime information for the local env. - - The schema of the run info follows official/benchmark/datastore/schema. - - Args: - model_name: string, the name of the model. - dataset_name: string, the name of dataset for training and evaluation. - run_params: dict, the dictionary of parameters for the run, it could - include hyperparameters or other params that are important for the run. - test_id: string, the unique name of the test run by the combination of key - parameters, eg batch size, num of GPU. It is hardware independent. 
- """ - run_info = _gather_run_info(model_name, dataset_name, run_params, test_id) - - with tf.io.gfile.GFile(os.path.join( - self._logging_dir, BENCHMARK_RUN_LOG_FILE_NAME), "w") as f: - try: - json.dump(run_info, f) - f.write("\n") - except (TypeError, ValueError) as e: - logging.warning("Failed to dump benchmark run info to log file: %s", e) - - def on_finish(self, status): - self._metric_file_handler.flush() - self._metric_file_handler.close() - - -def _gather_run_info(model_name, dataset_name, run_params, test_id): - """Collect the benchmark run information for the local environment.""" - run_info = { - "model_name": model_name, - "dataset": {"name": dataset_name}, - "machine_config": {}, - "test_id": test_id, - "run_date": datetime.datetime.utcnow().strftime( - _DATE_TIME_FORMAT_PATTERN)} - _collect_tensorflow_info(run_info) - _collect_tensorflow_environment_variables(run_info) - _collect_run_params(run_info, run_params) - _collect_memory_info(run_info) - _collect_test_environment(run_info) - return run_info - - -def _process_metric_to_json( - name, value, unit=None, global_step=None, extras=None): - """Validate the metric data and generate JSON for insert.""" - if not isinstance(value, numbers.Number): - logging.warning("Metric value to log should be a number. Got %s", - type(value)) - return None - - extras = _convert_to_json_dict(extras) - return { - "name": name, - "value": float(value), - "unit": unit, - "global_step": global_step, - "timestamp": datetime.datetime.utcnow().strftime( - _DATE_TIME_FORMAT_PATTERN), - "extras": extras} - - -def _collect_tensorflow_info(run_info): - run_info["tensorflow_version"] = { - "version": tf.version.VERSION, "git_hash": tf.version.GIT_VERSION} - - -def _collect_run_params(run_info, run_params): - """Log the parameter information for the benchmark run.""" - def process_param(name, value): - type_check = { - str: {"name": name, "string_value": value}, - int: {"name": name, "long_value": value}, - bool: {"name": name, "bool_value": str(value)}, - float: {"name": name, "float_value": value}, - } - return type_check.get(type(value), - {"name": name, "string_value": str(value)}) - if run_params: - run_info["run_parameters"] = [ - process_param(k, v) for k, v in sorted(run_params.items())] - - -def _collect_tensorflow_environment_variables(run_info): - run_info["tensorflow_environment_variables"] = [ - {"name": k, "value": v} - for k, v in sorted(os.environ.items()) if k.startswith("TF_")] - - -def _collect_memory_info(run_info): - try: - # Note: psutil is not installed in the TensorFlow OSS tree. - # It is installable via pip. - import psutil # pylint: disable=g-import-not-at-top - vmem = psutil.virtual_memory() - run_info["machine_config"]["memory_total"] = vmem.total - run_info["machine_config"]["memory_available"] = vmem.available - except ImportError: - logging.warn("'psutil' not imported. 
Memory info will not be logged.") - - -def _collect_test_environment(run_info): - """Detect the local environment, eg GCE, AWS or DGX, etc.""" - if cloud_lib.on_gcp(): - run_info["test_environment"] = GCP_TEST_ENV - # TODO(scottzhu): Add more testing env detection for other platform - - -def _parse_gpu_model(physical_device_desc): - # Assume all the GPU connected are same model - for kv in physical_device_desc.split(","): - k, _, v = kv.partition(":") - if k.strip() == "name": - return v.strip() - return None - - -def _convert_to_json_dict(input_dict): - if input_dict: - return [{"name": k, "value": v} for k, v in sorted(input_dict.items())] - else: - return [] diff --git a/official/r1/utils/logs/logger_test.py b/official/r1/utils/logs/logger_test.py deleted file mode 100644 index 96a8802d5..000000000 --- a/official/r1/utils/logs/logger_test.py +++ /dev/null @@ -1,253 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -"""Tests for benchmark logger.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import json -import os -import tempfile -import time -import unittest - -from absl import logging -from absl.testing import flagsaver -import tensorflow as tf - -from official.r1.utils.logs import logger -from official.utils.flags import core as flags_core -from official.utils.misc import keras_utils - - -class BenchmarkLoggerTest(tf.test.TestCase): - - @classmethod - def setUpClass(cls): # pylint: disable=invalid-name - super(BenchmarkLoggerTest, cls).setUpClass() - flags_core.define_benchmark() - - def test_get_default_benchmark_logger(self): - with flagsaver.flagsaver(benchmark_logger_type="foo"): - self.assertIsInstance(logger.get_benchmark_logger(), - logger.BaseBenchmarkLogger) - - def test_config_base_benchmark_logger(self): - with flagsaver.flagsaver(benchmark_logger_type="BaseBenchmarkLogger"): - logger.config_benchmark_logger() - self.assertIsInstance(logger.get_benchmark_logger(), - logger.BaseBenchmarkLogger) - - def test_config_benchmark_file_logger(self): - # Set the benchmark_log_dir first since the benchmark_logger_type will need - # the value to be set when it does the validation. 
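As an aside on the module removed above: `config_benchmark_logger` picks the logger class from `--benchmark_logger_type`, and `BenchmarkFileLogger` appends one JSON object per line to `metric.log` under `--benchmark_log_dir`. A minimal, standard-library sketch of the record each `log_metric` call produces — field names copied from `_process_metric_to_json` above, values made up:

```
import datetime
import json
import numbers


def metric_record(name, value, unit=None, global_step=None, extras=None):
  """Mirrors _process_metric_to_json: one JSON-serializable dict per metric."""
  if not isinstance(value, numbers.Number):
    return None  # The real logger warns and drops non-numeric values.
  extras = [{"name": k, "value": v} for k, v in sorted((extras or {}).items())]
  return {
      "name": name,
      "value": float(value),
      "unit": unit,
      "global_step": global_step,
      "timestamp": datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
      "extras": extras,
  }


print(json.dumps(metric_record("accuracy", 0.999, global_step=10000,
                               extras={"name": "value"})))
```

Each line of `metric.log` is one such object, which is what the file-logger tests below read back with `json.loads`.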
- with flagsaver.flagsaver(benchmark_log_dir="/tmp"): - with flagsaver.flagsaver(benchmark_logger_type="BenchmarkFileLogger"): - logger.config_benchmark_logger() - self.assertIsInstance(logger.get_benchmark_logger(), - logger.BenchmarkFileLogger) - - -class BaseBenchmarkLoggerTest(tf.test.TestCase): - - def setUp(self): - super(BaseBenchmarkLoggerTest, self).setUp() - self._actual_log = logging.info - self.logged_message = None - - def mock_log(*args, **kwargs): - self.logged_message = args - self._actual_log(*args, **kwargs) - - logging.info = mock_log - - def tearDown(self): - super(BaseBenchmarkLoggerTest, self).tearDown() - logging.info = self._actual_log - - def test_log_metric(self): - log = logger.BaseBenchmarkLogger() - log.log_metric("accuracy", 0.999, global_step=1e4, extras={"name": "value"}) - - expected_log_prefix = "Benchmark metric:" - self.assertRegexpMatches(str(self.logged_message), expected_log_prefix) - - -class BenchmarkFileLoggerTest(tf.test.TestCase): - - def setUp(self): - super(BenchmarkFileLoggerTest, self).setUp() - # Avoid pulling extra env vars from test environment which affects the test - # result, eg. Kokoro test has a TF_PKG env which affect the test case - # test_collect_tensorflow_environment_variables() - self.original_environ = dict(os.environ) - os.environ.clear() - - def tearDown(self): - super(BenchmarkFileLoggerTest, self).tearDown() - tf.io.gfile.rmtree(self.get_temp_dir()) - os.environ.clear() - os.environ.update(self.original_environ) - - def test_create_logging_dir(self): - non_exist_temp_dir = os.path.join(self.get_temp_dir(), "unknown_dir") - self.assertFalse(tf.io.gfile.isdir(non_exist_temp_dir)) - - logger.BenchmarkFileLogger(non_exist_temp_dir) - self.assertTrue(tf.io.gfile.isdir(non_exist_temp_dir)) - - def test_log_metric(self): - log_dir = tempfile.mkdtemp(dir=self.get_temp_dir()) - log = logger.BenchmarkFileLogger(log_dir) - log.log_metric("accuracy", 0.999, global_step=1e4, extras={"name": "value"}) - - metric_log = os.path.join(log_dir, "metric.log") - self.assertTrue(tf.io.gfile.exists(metric_log)) - with tf.io.gfile.GFile(metric_log) as f: - metric = json.loads(f.readline()) - self.assertEqual(metric["name"], "accuracy") - self.assertEqual(metric["value"], 0.999) - self.assertEqual(metric["unit"], None) - self.assertEqual(metric["global_step"], 1e4) - self.assertEqual(metric["extras"], [{"name": "name", "value": "value"}]) - - def test_log_multiple_metrics(self): - log_dir = tempfile.mkdtemp(dir=self.get_temp_dir()) - log = logger.BenchmarkFileLogger(log_dir) - log.log_metric("accuracy", 0.999, global_step=1e4, extras={"name": "value"}) - log.log_metric("loss", 0.02, global_step=1e4) - - metric_log = os.path.join(log_dir, "metric.log") - self.assertTrue(tf.io.gfile.exists(metric_log)) - with tf.io.gfile.GFile(metric_log) as f: - accuracy = json.loads(f.readline()) - self.assertEqual(accuracy["name"], "accuracy") - self.assertEqual(accuracy["value"], 0.999) - self.assertEqual(accuracy["unit"], None) - self.assertEqual(accuracy["global_step"], 1e4) - self.assertEqual(accuracy["extras"], [{"name": "name", "value": "value"}]) - - loss = json.loads(f.readline()) - self.assertEqual(loss["name"], "loss") - self.assertEqual(loss["value"], 0.02) - self.assertEqual(loss["unit"], None) - self.assertEqual(loss["global_step"], 1e4) - self.assertEqual(loss["extras"], []) - - def test_log_non_number_value(self): - log_dir = tempfile.mkdtemp(dir=self.get_temp_dir()) - log = logger.BenchmarkFileLogger(log_dir) - const = tf.constant(1) - 
log.log_metric("accuracy", const) - - metric_log = os.path.join(log_dir, "metric.log") - self.assertFalse(tf.io.gfile.exists(metric_log)) - - def test_log_evaluation_result(self): - eval_result = {"loss": 0.46237424, - "global_step": 207082, - "accuracy": 0.9285} - log_dir = tempfile.mkdtemp(dir=self.get_temp_dir()) - log = logger.BenchmarkFileLogger(log_dir) - log.log_evaluation_result(eval_result) - - metric_log = os.path.join(log_dir, "metric.log") - self.assertTrue(tf.io.gfile.exists(metric_log)) - with tf.io.gfile.GFile(metric_log) as f: - accuracy = json.loads(f.readline()) - self.assertEqual(accuracy["name"], "accuracy") - self.assertEqual(accuracy["value"], 0.9285) - self.assertEqual(accuracy["unit"], None) - self.assertEqual(accuracy["global_step"], 207082) - - loss = json.loads(f.readline()) - self.assertEqual(loss["name"], "loss") - self.assertEqual(loss["value"], 0.46237424) - self.assertEqual(loss["unit"], None) - self.assertEqual(loss["global_step"], 207082) - - def test_log_evaluation_result_with_invalid_type(self): - eval_result = "{'loss': 0.46237424, 'global_step': 207082}" - log_dir = tempfile.mkdtemp(dir=self.get_temp_dir()) - log = logger.BenchmarkFileLogger(log_dir) - log.log_evaluation_result(eval_result) - - metric_log = os.path.join(log_dir, "metric.log") - self.assertFalse(tf.io.gfile.exists(metric_log)) - - def test_collect_tensorflow_info(self): - run_info = {} - logger._collect_tensorflow_info(run_info) - self.assertNotEqual(run_info["tensorflow_version"], {}) - self.assertEqual(run_info["tensorflow_version"]["version"], - tf.version.VERSION) - self.assertEqual(run_info["tensorflow_version"]["git_hash"], - tf.version.GIT_VERSION) - - def test_collect_run_params(self): - run_info = {} - run_parameters = { - "batch_size": 32, - "synthetic_data": True, - "train_epochs": 100.00, - "dtype": "fp16", - "resnet_size": 50, - "random_tensor": tf.constant(2.0) - } - logger._collect_run_params(run_info, run_parameters) - self.assertEqual(len(run_info["run_parameters"]), 6) - self.assertEqual(run_info["run_parameters"][0], - {"name": "batch_size", "long_value": 32}) - self.assertEqual(run_info["run_parameters"][1], - {"name": "dtype", "string_value": "fp16"}) - v1_tensor = {"name": "random_tensor", "string_value": - "Tensor(\"Const:0\", shape=(), dtype=float32)"} - v2_tensor = {"name": "random_tensor", "string_value": - "tf.Tensor(2.0, shape=(), dtype=float32)"} - self.assertIn(run_info["run_parameters"][2], [v1_tensor, v2_tensor]) - - - self.assertEqual(run_info["run_parameters"][3], - {"name": "resnet_size", "long_value": 50}) - self.assertEqual(run_info["run_parameters"][4], - {"name": "synthetic_data", "bool_value": "True"}) - self.assertEqual(run_info["run_parameters"][5], - {"name": "train_epochs", "float_value": 100.00}) - - def test_collect_tensorflow_environment_variables(self): - os.environ["TF_ENABLE_WINOGRAD_NONFUSED"] = "1" - os.environ["TF_OTHER"] = "2" - os.environ["OTHER"] = "3" - - run_info = {} - logger._collect_tensorflow_environment_variables(run_info) - self.assertIsNotNone(run_info["tensorflow_environment_variables"]) - expected_tf_envs = [ - {"name": "TF_ENABLE_WINOGRAD_NONFUSED", "value": "1"}, - {"name": "TF_OTHER", "value": "2"}, - ] - self.assertEqual(run_info["tensorflow_environment_variables"], - expected_tf_envs) - - def test_collect_memory_info(self): - run_info = {"machine_config": {}} - logger._collect_memory_info(run_info) - self.assertIsNotNone(run_info["machine_config"]["memory_total"]) - 
self.assertIsNotNone(run_info["machine_config"]["memory_available"]) - - -if __name__ == "__main__": - tf.test.main() diff --git a/official/r1/utils/logs/metric_hook.py b/official/r1/utils/logs/metric_hook.py deleted file mode 100644 index f408e3e95..000000000 --- a/official/r1/utils/logs/metric_hook.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Session hook for logging benchmark metric.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import tensorflow as tf # pylint: disable=g-bad-import-order - - -class LoggingMetricHook(tf.estimator.LoggingTensorHook): - """Hook to log benchmark metric information. - - This hook is very similar as tf.train.LoggingTensorHook, which logs given - tensors every N local steps, every N seconds, or at the end. The metric - information will be logged to given log_dir or via metric_logger in JSON - format, which can be consumed by data analysis pipeline later. - - Note that if `at_end` is True, `tensors` should not include any tensor - whose evaluation produces a side effect such as consuming additional inputs. - """ - - def __init__(self, tensors, metric_logger=None, - every_n_iter=None, every_n_secs=None, at_end=False): - """Initializer for LoggingMetricHook. - - Args: - tensors: `dict` that maps string-valued tags to tensors/tensor names, - or `iterable` of tensors/tensor names. - metric_logger: instance of `BenchmarkLogger`, the benchmark logger that - hook should use to write the log. - every_n_iter: `int`, print the values of `tensors` once every N local - steps taken on the current worker. - every_n_secs: `int` or `float`, print the values of `tensors` once every N - seconds. Exactly one of `every_n_iter` and `every_n_secs` should be - provided. - at_end: `bool` specifying whether to print the values of `tensors` at the - end of the run. - - Raises: - ValueError: - 1. `every_n_iter` is non-positive, or - 2. Exactly one of every_n_iter and every_n_secs should be provided. - 3. Exactly one of log_dir and metric_logger should be provided. 
- """ - super(LoggingMetricHook, self).__init__( - tensors=tensors, - every_n_iter=every_n_iter, - every_n_secs=every_n_secs, - at_end=at_end) - - if metric_logger is None: - raise ValueError("metric_logger should be provided.") - self._logger = metric_logger - - def begin(self): - super(LoggingMetricHook, self).begin() - self._global_step_tensor = tf.compat.v1.train.get_global_step() - if self._global_step_tensor is None: - raise RuntimeError( - "Global step should be created to use LoggingMetricHook.") - if self._global_step_tensor.name not in self._current_tensors: - self._current_tensors[self._global_step_tensor.name] = ( - self._global_step_tensor) - - def after_run(self, unused_run_context, run_values): - # should_trigger is a internal state that populated at before_run, and it is - # using self_timer to determine whether it should trigger. - if self._should_trigger: - self._log_metric(run_values.results) - - self._iter_count += 1 - - def end(self, session): - if self._log_at_end: - values = session.run(self._current_tensors) - self._log_metric(values) - - def _log_metric(self, tensor_values): - self._timer.update_last_triggered_step(self._iter_count) - global_step = tensor_values[self._global_step_tensor.name] - # self._tag_order is populated during the init of LoggingTensorHook - for tag in self._tag_order: - self._logger.log_metric(tag, tensor_values[tag], global_step=global_step) diff --git a/official/r1/utils/logs/metric_hook_test.py b/official/r1/utils/logs/metric_hook_test.py deleted file mode 100644 index eba93014c..000000000 --- a/official/r1/utils/logs/metric_hook_test.py +++ /dev/null @@ -1,217 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================== -"""Tests for metric_hook.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import tempfile -import time - -import tensorflow as tf # pylint: disable=g-bad-import-order -from tensorflow.python.training import monitored_session # pylint: disable=g-bad-import-order - -from official.r1.utils.logs import metric_hook -from official.r1.utils.logs import mock_lib - - -class LoggingMetricHookTest(tf.test.TestCase): - """Tests for LoggingMetricHook.""" - - def setUp(self): - super(LoggingMetricHookTest, self).setUp() - - self._log_dir = tempfile.mkdtemp(dir=self.get_temp_dir()) - self._logger = mock_lib.MockBenchmarkLogger() - - def tearDown(self): - super(LoggingMetricHookTest, self).tearDown() - tf.io.gfile.rmtree(self.get_temp_dir()) - - def test_illegal_args(self): - with self.assertRaisesRegexp(ValueError, "nvalid every_n_iter"): - metric_hook.LoggingMetricHook(tensors=["t"], every_n_iter=0) - with self.assertRaisesRegexp(ValueError, "nvalid every_n_iter"): - metric_hook.LoggingMetricHook(tensors=["t"], every_n_iter=-10) - with self.assertRaisesRegexp(ValueError, "xactly one of"): - metric_hook.LoggingMetricHook( - tensors=["t"], every_n_iter=5, every_n_secs=5) - with self.assertRaisesRegexp(ValueError, "xactly one of"): - metric_hook.LoggingMetricHook(tensors=["t"]) - with self.assertRaisesRegexp(ValueError, "metric_logger"): - metric_hook.LoggingMetricHook(tensors=["t"], every_n_iter=5) - - def test_print_at_end_only(self): - with tf.Graph().as_default(), tf.compat.v1.Session() as sess: - tf.compat.v1.train.get_or_create_global_step() - t = tf.constant(42.0, name="foo") - train_op = tf.constant(3) - hook = metric_hook.LoggingMetricHook( - tensors=[t.name], at_end=True, metric_logger=self._logger) - hook.begin() - mon_sess = monitored_session._HookedSession(sess, [hook]) # pylint: disable=protected-access - sess.run(tf.compat.v1.global_variables_initializer()) - - for _ in range(3): - mon_sess.run(train_op) - self.assertEqual(self._logger.logged_metric, []) - - hook.end(sess) - self.assertEqual(len(self._logger.logged_metric), 1) - metric = self._logger.logged_metric[0] - self.assertRegexpMatches(metric["name"], "foo") - self.assertEqual(metric["value"], 42.0) - self.assertEqual(metric["unit"], None) - self.assertEqual(metric["global_step"], 0) - - def test_global_step_not_found(self): - with tf.Graph().as_default(): - t = tf.constant(42.0, name="foo") - hook = metric_hook.LoggingMetricHook( - tensors=[t.name], at_end=True, metric_logger=self._logger) - - with self.assertRaisesRegexp( - RuntimeError, "should be created to use LoggingMetricHook."): - hook.begin() - - def test_log_tensors(self): - with tf.Graph().as_default(), tf.compat.v1.Session() as sess: - tf.compat.v1.train.get_or_create_global_step() - t1 = tf.constant(42.0, name="foo") - t2 = tf.constant(43.0, name="bar") - train_op = tf.constant(3) - hook = metric_hook.LoggingMetricHook( - tensors=[t1, t2], at_end=True, metric_logger=self._logger) - hook.begin() - mon_sess = monitored_session._HookedSession(sess, [hook]) # pylint: disable=protected-access - sess.run(tf.compat.v1.global_variables_initializer()) - - for _ in range(3): - mon_sess.run(train_op) - self.assertEqual(self._logger.logged_metric, []) - - hook.end(sess) - self.assertEqual(len(self._logger.logged_metric), 2) - metric1 = self._logger.logged_metric[0] - self.assertRegexpMatches(str(metric1["name"]), "foo") - 
self.assertEqual(metric1["value"], 42.0) - self.assertEqual(metric1["unit"], None) - self.assertEqual(metric1["global_step"], 0) - - metric2 = self._logger.logged_metric[1] - self.assertRegexpMatches(str(metric2["name"]), "bar") - self.assertEqual(metric2["value"], 43.0) - self.assertEqual(metric2["unit"], None) - self.assertEqual(metric2["global_step"], 0) - - def _validate_print_every_n_steps(self, sess, at_end): - t = tf.constant(42.0, name="foo") - - train_op = tf.constant(3) - hook = metric_hook.LoggingMetricHook( - tensors=[t.name], every_n_iter=10, at_end=at_end, - metric_logger=self._logger) - hook.begin() - mon_sess = monitored_session._HookedSession(sess, [hook]) # pylint: disable=protected-access - sess.run(tf.compat.v1.global_variables_initializer()) - mon_sess.run(train_op) - self.assertRegexpMatches(str(self._logger.logged_metric), t.name) - for _ in range(3): - self._logger.logged_metric = [] - for _ in range(9): - mon_sess.run(train_op) - # assertNotRegexpMatches is not supported by python 3.1 and later - self.assertEqual(str(self._logger.logged_metric).find(t.name), -1) - mon_sess.run(train_op) - self.assertRegexpMatches(str(self._logger.logged_metric), t.name) - - # Add additional run to verify proper reset when called multiple times. - self._logger.logged_metric = [] - mon_sess.run(train_op) - # assertNotRegexpMatches is not supported by python 3.1 and later - self.assertEqual(str(self._logger.logged_metric).find(t.name), -1) - - self._logger.logged_metric = [] - hook.end(sess) - if at_end: - self.assertRegexpMatches(str(self._logger.logged_metric), t.name) - else: - # assertNotRegexpMatches is not supported by python 3.1 and later - self.assertEqual(str(self._logger.logged_metric).find(t.name), -1) - - def test_print_every_n_steps(self): - with tf.Graph().as_default(), tf.compat.v1.Session() as sess: - tf.compat.v1.train.get_or_create_global_step() - self._validate_print_every_n_steps(sess, at_end=False) - # Verify proper reset. - self._validate_print_every_n_steps(sess, at_end=False) - - def test_print_every_n_steps_and_end(self): - with tf.Graph().as_default(), tf.compat.v1.Session() as sess: - tf.compat.v1.train.get_or_create_global_step() - self._validate_print_every_n_steps(sess, at_end=True) - # Verify proper reset. 
- self._validate_print_every_n_steps(sess, at_end=True) - - def _validate_print_every_n_secs(self, sess, at_end): - t = tf.constant(42.0, name="foo") - train_op = tf.constant(3) - - hook = metric_hook.LoggingMetricHook( - tensors=[t.name], every_n_secs=1.0, at_end=at_end, - metric_logger=self._logger) - hook.begin() - mon_sess = monitored_session._HookedSession(sess, [hook]) # pylint: disable=protected-access - sess.run(tf.compat.v1.global_variables_initializer()) - - mon_sess.run(train_op) - self.assertRegexpMatches(str(self._logger.logged_metric), t.name) - - # assertNotRegexpMatches is not supported by python 3.1 and later - self._logger.logged_metric = [] - mon_sess.run(train_op) - self.assertEqual(str(self._logger.logged_metric).find(t.name), -1) - time.sleep(1.0) - - self._logger.logged_metric = [] - mon_sess.run(train_op) - self.assertRegexpMatches(str(self._logger.logged_metric), t.name) - - self._logger.logged_metric = [] - hook.end(sess) - if at_end: - self.assertRegexpMatches(str(self._logger.logged_metric), t.name) - else: - # assertNotRegexpMatches is not supported by python 3.1 and later - self.assertEqual(str(self._logger.logged_metric).find(t.name), -1) - - def test_print_every_n_secs(self): - with tf.Graph().as_default(), tf.compat.v1.Session() as sess: - tf.compat.v1.train.get_or_create_global_step() - self._validate_print_every_n_secs(sess, at_end=False) - # Verify proper reset. - self._validate_print_every_n_secs(sess, at_end=False) - - def test_print_every_n_secs_and_end(self): - with tf.Graph().as_default(), tf.compat.v1.Session() as sess: - tf.compat.v1.train.get_or_create_global_step() - self._validate_print_every_n_secs(sess, at_end=True) - # Verify proper reset. - self._validate_print_every_n_secs(sess, at_end=True) - - -if __name__ == "__main__": - tf.test.main() diff --git a/official/r1/utils/logs/mlperf_helper.py b/official/r1/utils/logs/mlperf_helper.py deleted file mode 100644 index 3f9601b6b..000000000 --- a/official/r1/utils/logs/mlperf_helper.py +++ /dev/null @@ -1,192 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -"""Wrapper for the mlperf logging utils. - -MLPerf compliance logging is only desired under a limited set of circumstances. -This module is intended to keep users from needing to consider logging (or -install the module) unless they are performing mlperf runs. -""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from collections import namedtuple -import json -import os -import re -import subprocess -import sys -from absl import logging -import typing -# pylint:disable=logging-format-interpolation - - -_MIN_VERSION = (0, 0, 10) -_STACK_OFFSET = 2 - -SUDO = "sudo" if os.geteuid() else "" - -# This indirection is used in docker. 
-DROP_CACHE_LOC = os.getenv("DROP_CACHE_LOC", "/proc/sys/vm/drop_caches") - -_NCF_PREFIX = "NCF_RAW_" - -# TODO(robieta): move line parsing to mlperf util -_PREFIX = r"(?:{})?:::MLPv([0-9]+).([0-9]+).([0-9]+)".format(_NCF_PREFIX) -_BENCHMARK = r"([a-zA-Z0-9_]+)" -_TIMESTAMP = r"([0-9]+\.[0-9]+)" -_CALLSITE = r"\((.+):([0-9]+)\)" -_TAG = r"([a-zA-Z0-9_]+)" -_VALUE = r"(.*)" - -ParsedLine = namedtuple("ParsedLine", ["version", "benchmark", "timestamp", - "callsite", "tag", "value"]) - -LINE_PATTERN = re.compile( - "^{prefix} {benchmark} {timestamp} {callsite} {tag}(: |$){value}?$".format( - prefix=_PREFIX, benchmark=_BENCHMARK, timestamp=_TIMESTAMP, - callsite=_CALLSITE, tag=_TAG, value=_VALUE)) - - -def parse_line(line): # type: (str) -> typing.Optional[ParsedLine] - match = LINE_PATTERN.match(line.strip()) - if not match: - return - - major, minor, micro, benchmark, timestamp = match.groups()[:5] - call_file, call_line, tag, _, value = match.groups()[5:] - - return ParsedLine(version=(int(major), int(minor), int(micro)), - benchmark=benchmark, timestamp=timestamp, - callsite=(call_file, call_line), tag=tag, value=value) - - -def unparse_line(parsed_line): # type: (ParsedLine) -> str - version_str = "{}.{}.{}".format(*parsed_line.version) - callsite_str = "({}:{})".format(*parsed_line.callsite) - value_str = ": {}".format(parsed_line.value) if parsed_line.value else "" - return ":::MLPv{} {} {} {} {} {}".format( - version_str, parsed_line.benchmark, parsed_line.timestamp, callsite_str, - parsed_line.tag, value_str) - - -def get_mlperf_log(): - """Shielded import of mlperf_log module.""" - try: - import mlperf_compliance - - def test_mlperf_log_pip_version(): - """Check that mlperf_compliance is up to date.""" - import pkg_resources - version = pkg_resources.get_distribution("mlperf_compliance") - version = tuple(int(i) for i in version.version.split(".")) - if version < _MIN_VERSION: - logging.warning("mlperf_compliance is version {}, must be >= {}".format( - ".".join([str(i) for i in version]), - ".".join([str(i) for i in _MIN_VERSION]))) - raise ImportError - return mlperf_compliance.mlperf_log - - mlperf_log = test_mlperf_log_pip_version() - - except ImportError: - mlperf_log = None - - return mlperf_log - - -class Logger(object): - """MLPerf logger indirection class. - - This logger only logs for MLPerf runs, and prevents various errors associated - with not having the mlperf_compliance package installed. 
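For reference, the log-line grammar encoded by `LINE_PATTERN` above, with a fabricated example of what `parse_line` recovers from it (the values are illustrative, not from a real MLPerf run):

```
from official.r1.utils.logs.mlperf_helper import parse_line  # removed by this patch

# Illustrative only; the line is made up to match LINE_PATTERN above.
line = ':::MLPv0.5.0 ncf 1538678136.21 (train.py:123) eval_accuracy: 0.92'
parsed = parse_line(line)
# parsed.version   == (0, 5, 0)
# parsed.benchmark == 'ncf'
# parsed.timestamp == '1538678136.21'
# parsed.callsite  == ('train.py', '123')
# parsed.tag       == 'eval_accuracy'
# parsed.value     == '0.92'
```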
- """ - class Tags(object): - def __init__(self, mlperf_log): - self._enabled = False - self._mlperf_log = mlperf_log - - def __getattr__(self, item): - if self._mlperf_log is None or not self._enabled: - return - return getattr(self._mlperf_log, item) - - def __init__(self): - self._enabled = False - self._mlperf_log = get_mlperf_log() - self.tags = self.Tags(self._mlperf_log) - - def __call__(self, enable=False): - if enable and self._mlperf_log is None: - raise ImportError("MLPerf logging was requested, but mlperf_compliance " - "module could not be loaded.") - - self._enabled = enable - self.tags._enabled = enable - return self - - def __enter__(self): - pass - - def __exit__(self, exc_type, exc_val, exc_tb): - self._enabled = False - self.tags._enabled = False - - @property - def log_file(self): - if self._mlperf_log is None: - return - return self._mlperf_log.LOG_FILE - - @property - def enabled(self): - return self._enabled - - def ncf_print(self, key, value=None, stack_offset=_STACK_OFFSET, - deferred=False, extra_print=False, prefix=_NCF_PREFIX): - if self._mlperf_log is None or not self.enabled: - return - self._mlperf_log.ncf_print(key=key, value=value, stack_offset=stack_offset, - deferred=deferred, extra_print=extra_print, - prefix=prefix) - - def set_ncf_root(self, path): - if self._mlperf_log is None: - return - self._mlperf_log.ROOT_DIR_NCF = path - - -LOGGER = Logger() -ncf_print, set_ncf_root = LOGGER.ncf_print, LOGGER.set_ncf_root -TAGS = LOGGER.tags - - -def clear_system_caches(): - if not LOGGER.enabled: - return - ret_code = subprocess.call( - ["sync && echo 3 | {} tee {}".format(SUDO, DROP_CACHE_LOC)], - shell=True) - - if ret_code: - raise ValueError("Failed to clear caches") - - -if __name__ == "__main__": - logging.set_verbosity(logging.INFO) - with LOGGER(True): - ncf_print(key=TAGS.RUN_START) diff --git a/official/r1/utils/logs/mock_lib.py b/official/r1/utils/logs/mock_lib.py deleted file mode 100644 index ee4de3c48..000000000 --- a/official/r1/utils/logs/mock_lib.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -"""Mock objects and related functions for testing.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - - -class MockBenchmarkLogger(object): - """This is a mock logger that can be used in dependent tests.""" - - def __init__(self): - self.logged_metric = [] - - def log_metric(self, name, value, unit=None, global_step=None, - extras=None): - self.logged_metric.append({ - "name": name, - "value": float(value), - "unit": unit, - "global_step": global_step, - "extras": extras}) diff --git a/official/r1/utils/tpu.py b/official/r1/utils/tpu.py deleted file mode 100644 index 737a79422..000000000 --- a/official/r1/utils/tpu.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Functions specific to running TensorFlow on TPUs.""" - -import tensorflow as tf - - -# "local" is a magic word in the TPU cluster resolver; it informs the resolver -# to use the local CPU as the compute device. This is useful for testing and -# debugging; the code flow is ostensibly identical, but without the need to -# actually have a TPU on the other end. -LOCAL = "local" - - -def construct_scalar_host_call(metric_dict, model_dir, prefix=""): - """Construct a host call to log scalars when training on TPU. - - Args: - metric_dict: A dict of the tensors to be logged. - model_dir: The location to write the summary. - prefix: The prefix (if any) to prepend to the metric names. - - Returns: - A tuple of (function, args_to_be_passed_to_said_function) - """ - # type: (dict, str) -> (function, list) - metric_names = list(metric_dict.keys()) - - def host_call_fn(global_step, *args): - """Training host call. Creates scalar summaries for training metrics. - - This function is executed on the CPU and should not directly reference - any Tensors in the rest of the `model_fn`. To pass Tensors from the - model to the `metric_fn`, provide as part of the `host_call`. See - https://www.tensorflow.org/api_docs/python/tf/contrib/tpu/TPUEstimatorSpec - for more information. - - Arguments should match the list of `Tensor` objects passed as the second - element in the tuple passed to `host_call`. - - Args: - global_step: `Tensor with shape `[batch]` for the global_step - *args: Remaining tensors to log. - - Returns: - List of summary ops to run on the CPU host. - """ - step = global_step[0] - with tf.compat.v1.summary.create_file_writer( - logdir=model_dir, filename_suffix=".host_call").as_default(): - with tf.compat.v1.summary.always_record_summaries(): - for i, name in enumerate(metric_names): - tf.compat.v1.summary.scalar(prefix + name, args[i][0], step=step) - - return tf.compat.v1.summary.all_summary_ops() - - # To log the current learning rate, and gradient norm for Tensorboard, the - # summary op needs to be run on the host CPU via host_call. host_call - # expects [batch_size, ...] Tensors, thus reshape to introduce a batch - # dimension. These Tensors are implicitly concatenated to - # [params['batch_size']]. - global_step_tensor = tf.reshape( - tf.compat.v1.train.get_or_create_global_step(), [1]) - other_tensors = [tf.reshape(metric_dict[key], [1]) for key in metric_names] - - return host_call_fn, [global_step_tensor] + other_tensors - - -def embedding_matmul(embedding_table, values, mask, name="embedding_matmul"): - """Performs embedding lookup via a matmul. - - The matrix to be multiplied by the embedding table Tensor is constructed - via an implementation of scatter based on broadcasting embedding indices - and performing an equality comparison against a broadcasted - range(num_embedding_table_rows). All masked positions will produce an - embedding vector of zeros. 
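An editorial aside on the paragraph above: the scatter-by-comparison trick is easy to check outside TensorFlow. A toy NumPy sketch with arbitrary numbers, showing that the masked one-hot matmul reproduces a masked gather (i.e. `embedding_lookup`):

```
import numpy as np

table = np.arange(12.0).reshape(4, 3)   # 4 embeddings of dimension 3
values = np.array([[1, 3, 0]])          # batch=1, three token ids
mask = np.array([[1.0, 1.0, 0.0]])      # last position is padding

# Broadcast the ids against range(num_rows) and compare for equality to build
# one-hot rows, then zero the masked positions, as embedding_matmul does with
# tf.where.
one_hot = (values[..., None] == np.arange(4)).astype(np.float64) * mask[..., None]
via_matmul = one_hot @ table            # shape (1, 3, 3)

via_gather = table[values] * mask[..., None]
assert np.allclose(via_matmul, via_gather)
```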
- - Args: - embedding_table: Tensor of embedding table. - Rank 2 (table_size x embedding dim) - values: Tensor of embedding indices. Rank 2 (batch x n_indices) - mask: Tensor of mask / weights. Rank 2 (batch x n_indices) - name: Optional name scope for created ops - - Returns: - Rank 3 tensor of embedding vectors. - """ - - with tf.name_scope(name): - n_embeddings = embedding_table.get_shape().as_list()[0] - batch_size, padded_size = values.shape.as_list() - - emb_idcs = tf.tile( - tf.reshape(values, (batch_size, padded_size, 1)), (1, 1, n_embeddings)) - emb_weights = tf.tile( - tf.reshape(mask, (batch_size, padded_size, 1)), (1, 1, n_embeddings)) - col_idcs = tf.tile( - tf.reshape(tf.range(n_embeddings), (1, 1, n_embeddings)), - (batch_size, padded_size, 1)) - one_hot = tf.where( - tf.equal(emb_idcs, col_idcs), emb_weights, - tf.zeros((batch_size, padded_size, n_embeddings))) - - return tf.tensordot(one_hot, embedding_table, 1) diff --git a/official/r1/utils/tpu_test.py b/official/r1/utils/tpu_test.py deleted file mode 100644 index ba5b868a6..000000000 --- a/official/r1/utils/tpu_test.py +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Test TPU optimized matmul embedding.""" - -import numpy as np -import tensorflow as tf - -from official.r1.utils import tpu as tpu_utils - - -TEST_CASES = [ - dict(embedding_dim=256, vocab_size=1000, sequence_length=64, - batch_size=32, seed=54131), - dict(embedding_dim=8, vocab_size=15, sequence_length=12, - batch_size=256, seed=536413), - dict(embedding_dim=2048, vocab_size=512, sequence_length=50, - batch_size=8, seed=35124) -] - - -class TPUBaseTester(tf.test.TestCase): - def construct_embedding_and_values(self, embedding_dim, vocab_size, - sequence_length, batch_size, seed): - np.random.seed(seed) - - embeddings = np.random.random(size=(vocab_size, embedding_dim)) - embedding_table = tf.convert_to_tensor(value=embeddings, dtype=tf.float32) - - tokens = np.random.randint(low=1, high=vocab_size-1, - size=(batch_size, sequence_length)) - for i in range(batch_size): - tokens[i, np.random.randint(low=0, high=sequence_length-1):] = 0 - values = tf.convert_to_tensor(value=tokens, dtype=tf.int32) - mask = tf.cast(tf.not_equal(values, 0), dtype=tf.float32) - return embedding_table, values, mask - - def _test_embedding(self, embedding_dim, vocab_size, - sequence_length, batch_size, seed): - """Test that matmul embedding matches embedding lookup (gather).""" - - with self.test_session(): - embedding_table, values, mask = self.construct_embedding_and_values( - embedding_dim=embedding_dim, - vocab_size=vocab_size, - sequence_length=sequence_length, - batch_size=batch_size, - seed=seed - ) - - embedding = (tf.nn.embedding_lookup(params=embedding_table, ids=values) * - tf.expand_dims(mask, -1)) - - matmul_embedding = tpu_utils.embedding_matmul( - embedding_table=embedding_table, values=values, 
mask=mask) - - self.assertAllClose(embedding, matmul_embedding) - - def _test_masking(self, embedding_dim, vocab_size, - sequence_length, batch_size, seed): - """Test that matmul embedding properly zeros masked positions.""" - with self.test_session(): - embedding_table, values, mask = self.construct_embedding_and_values( - embedding_dim=embedding_dim, - vocab_size=vocab_size, - sequence_length=sequence_length, - batch_size=batch_size, - seed=seed - ) - - matmul_embedding = tpu_utils.embedding_matmul( - embedding_table=embedding_table, values=values, mask=mask) - - self.assertAllClose(matmul_embedding, - matmul_embedding * tf.expand_dims(mask, -1)) - - def test_embedding_0(self): - self._test_embedding(**TEST_CASES[0]) - - def test_embedding_1(self): - self._test_embedding(**TEST_CASES[1]) - - def test_embedding_2(self): - self._test_embedding(**TEST_CASES[2]) - - def test_masking_0(self): - self._test_masking(**TEST_CASES[0]) - - def test_masking_1(self): - self._test_masking(**TEST_CASES[1]) - - def test_masking_2(self): - self._test_masking(**TEST_CASES[2]) - - -if __name__ == "__main__": - tf.test.main() diff --git a/official/r1/wide_deep/README.md b/official/r1/wide_deep/README.md deleted file mode 100644 index 6598d8955..000000000 --- a/official/r1/wide_deep/README.md +++ /dev/null @@ -1,102 +0,0 @@ -![No Maintenance Intended](https://img.shields.io/badge/No%20Maintenance%20Intended-%E2%9C%95-red.svg) -![TensorFlow Requirement: 1.x](https://img.shields.io/badge/TensorFlow%20Requirement-1.x-brightgreen) -![TensorFlow 2 Not Supported](https://img.shields.io/badge/TensorFlow%202%20Not%20Supported-%E2%9C%95-red.svg) - -# Predicting Income with the Census Income Dataset - -The implementation is based on TensorFlow 1.x. - -## Overview -The [Census Income Data Set](https://archive.ics.uci.edu/ml/datasets/Census+Income) contains over 48,000 samples with attributes including age, occupation, education, and income (a binary label, either `>50K` or `<=50K`). The dataset is split into roughly 32,000 training and 16,000 testing samples. - -Here, we use the [wide and deep model](https://research.googleblog.com/2016/06/wide-deep-learning-better-together-with.html) to predict the income labels. The **wide model** is able to memorize interactions with data with a large number of features but not able to generalize these learned interactions on new data. The **deep model** generalizes well but is unable to learn exceptions within the data. The **wide and deep model** combines the two models and is able to generalize while learning exceptions. - -For the purposes of this example code, the Census Income Data Set was chosen to allow the model to train in a reasonable amount of time. You'll notice that the deep model performs almost as well as the wide and deep model on this dataset. The wide and deep model truly shines on larger data sets with high-cardinality features, where each feature has millions/billions of unique possible values (which is the specialty of the wide model). - -Finally, a key point. As a modeler and developer, think about how this dataset is used and the potential benefits and harm a model's predictions can cause. A model like this could reinforce societal biases and disparities. Is a feature relevant to the problem you want to solve, or will it introduce bias? For more information, read about [ML fairness](https://developers.google.com/machine-learning/fairness-overview/). - ---- - -The code sample in this directory uses the high level `tf.estimator.Estimator` API. 
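The wide/deep split described above maps directly onto TF 1.x feature columns and canned estimators. A compressed sketch, hashing both categorical columns for brevity; the real column definitions live in `census_dataset.build_model_columns` further down in this patch:

```
import tensorflow.compat.v1 as tf

education = tf.feature_column.categorical_column_with_hash_bucket(
    'education', hash_bucket_size=100)
occupation = tf.feature_column.categorical_column_with_hash_bucket(
    'occupation', hash_bucket_size=100)
age = tf.feature_column.numeric_column('age')

# Wide: sparse crosses memorize specific feature co-occurrences seen in training.
wide_columns = [
    tf.feature_column.crossed_column(['education', 'occupation'],
                                     hash_bucket_size=1000),
]

# Deep: dense embeddings generalize to combinations never seen in training.
deep_columns = [
    age,
    tf.feature_column.embedding_column(education, dimension=8),
    tf.feature_column.embedding_column(occupation, dimension=8),
]

# Wide and deep: both sets feed one jointly trained model.
estimator = tf.estimator.DNNLinearCombinedClassifier(
    linear_feature_columns=wide_columns,
    dnn_feature_columns=deep_columns,
    dnn_hidden_units=[100, 50])
```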
This API is great for fast iteration and quickly adapting models to your own datasets without major code overhauls. It allows you to move from single-worker training to distributed training, and it makes it easy to export model binaries for prediction. - -The input function for the `Estimator` uses `tf.contrib.data.TextLineDataset`, which creates a `Dataset` object. The `Dataset` API makes it easy to apply transformations (map, batch, shuffle, etc.) to the data. [Read more here](https://www.tensorflow.org/guide/datasets). - -The `Estimator` and `Dataset` APIs are both highly encouraged for fast development and efficient training. - -## Running the code -First make sure you've [added the models folder to your Python path](/official/#running-the-models); otherwise you may encounter an error like `ImportError: No module named official.wide_deep`. - -### Setup -The [Census Income Data Set](https://archive.ics.uci.edu/ml/datasets/Census+Income) that this sample uses for training is hosted by the [UC Irvine Machine Learning Repository](https://archive.ics.uci.edu/ml/datasets/). We have provided a script that downloads and cleans the necessary files. - -``` -python census_dataset.py -``` - -This will download the files to `/tmp/census_data`. To change the directory, set the `--data_dir` flag. - -### Training -You can run the code locally as follows: - -``` -python census_main.py -``` - -The model is saved to `/tmp/census_model` by default, which can be changed using the `--model_dir` flag. - -To run the *wide* or *deep*-only models, set the `--model_type` flag to `wide` or `deep`. Other flags are configurable as well; see `census_main.py` for details. - -The final accuracy should be over 83% with any of the three model types. - -You can also experiment with `-inter` and `-intra` flag to explore inter/intra op parallelism for potential better performance as follows: - -``` -python census_main.py --inter= --intra= -``` -Please note the above optional inter/intra op does not affect model accuracy. These are TensorFlow framework configurations that only affect execution time. -For more details regarding the above inter/intra flags, please refer to [Optimizing_for_CPU](https://www.tensorflow.org/performance/performance_guide#optimizing_for_cpu) or [TensorFlow config.proto source code](https://github.com/tensorflow/tensorflow/blob/26b4dfa65d360f2793ad75083c797d57f8661b93/tensorflow/core/protobuf/config.proto#L165). - -### TensorBoard - -Run TensorBoard to inspect the details about the graph and training progression. - -``` -tensorboard --logdir=/tmp/census_model -``` - -## Inference with SavedModel -You can export the model into Tensorflow [SavedModel](https://www.tensorflow.org/guide/saved_model) format by using the argument `--export_dir`: - -``` -python census_main.py --export_dir /tmp/wide_deep_saved_model -``` - -After the model finishes training, use [`saved_model_cli`](https://www.tensorflow.org/guide/saved_model#cli_to_inspect_and_execute_savedmodel) to inspect and execute the SavedModel. - -Try the following commands to inspect the SavedModel: - -**Replace `${TIMESTAMP}` with the folder produced (e.g. 1524249124)** -``` -# List possible tag_sets. Only one metagraph is saved, so there will be one option. -saved_model_cli show --dir /tmp/wide_deep_saved_model/${TIMESTAMP}/ - -# Show SignatureDefs for tag_set=serve. SignatureDefs define the outputs to show. 
-saved_model_cli show --dir /tmp/wide_deep_saved_model/${TIMESTAMP}/ \ - --tag_set serve --all -``` - -### Inference -Let's use the model to predict the income group of two examples: -``` -saved_model_cli run --dir /tmp/wide_deep_saved_model/${TIMESTAMP}/ \ ---tag_set serve --signature_def="predict" \ ---input_examples='examples=[{"age":[46.], "education_num":[10.], "capital_gain":[7688.], "capital_loss":[0.], "hours_per_week":[38.]}, {"age":[24.], "education_num":[13.], "capital_gain":[0.], "capital_loss":[0.], "hours_per_week":[50.]}]' -``` - -This will print out the predicted classes and class probabilities. Class 0 is the <=50k group and 1 is the >50k group. - -## Additional Links - -If you are interested in distributed training, take a look at [Distributed TensorFlow](https://www.tensorflow.org/deploy/distributed). - -You can also [run this model on Cloud ML Engine](https://cloud.google.com/ml-engine/docs/getting-started-training-prediction), which provides [hyperparameter tuning](https://cloud.google.com/ml-engine/docs/getting-started-training-prediction#hyperparameter_tuning) to maximize your model's results and enables [deploying your model for prediction](https://cloud.google.com/ml-engine/docs/getting-started-training-prediction#deploy_a_model_to_support_prediction). diff --git a/official/r1/wide_deep/__init__.py b/official/r1/wide_deep/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/official/r1/wide_deep/census_dataset.py b/official/r1/wide_deep/census_dataset.py deleted file mode 100644 index f3a07ac61..000000000 --- a/official/r1/wide_deep/census_dataset.py +++ /dev/null @@ -1,205 +0,0 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================== -"""Download and clean the Census Income Dataset.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import os -import sys - -# pylint: disable=wrong-import-order -from absl import app as absl_app -from absl import flags -from six.moves import urllib -from six.moves import zip -import tensorflow.compat.v1 as tf -# pylint: enable=wrong-import-order - -from official.utils.flags import core as flags_core - - -DATA_URL = 'https://archive.ics.uci.edu/ml/machine-learning-databases/adult' -TRAINING_FILE = 'adult.data' -TRAINING_URL = '%s/%s' % (DATA_URL, TRAINING_FILE) -EVAL_FILE = 'adult.test' -EVAL_URL = '%s/%s' % (DATA_URL, EVAL_FILE) - - -_CSV_COLUMNS = [ - 'age', 'workclass', 'fnlwgt', 'education', 'education_num', - 'marital_status', 'occupation', 'relationship', 'race', 'gender', - 'capital_gain', 'capital_loss', 'hours_per_week', 'native_country', - 'income_bracket' -] - -_CSV_COLUMN_DEFAULTS = [[0], [''], [0], [''], [0], [''], [''], [''], [''], [''], - [0], [0], [0], [''], ['']] - -_HASH_BUCKET_SIZE = 1000 - -_NUM_EXAMPLES = { - 'train': 32561, - 'validation': 16281, -} - - -def _download_and_clean_file(filename, url): - """Downloads data from url, and makes changes to match the CSV format.""" - temp_file, _ = urllib.request.urlretrieve(url) - with tf.gfile.Open(temp_file, 'r') as temp_eval_file: - with tf.gfile.Open(filename, 'w') as eval_file: - for line in temp_eval_file: - line = line.strip() - line = line.replace(', ', ',') - if not line or ',' not in line: - continue - if line[-1] == '.': - line = line[:-1] - line += '\n' - eval_file.write(line) - tf.gfile.Remove(temp_file) - - -def download(data_dir): - """Download census data if it is not already present.""" - tf.gfile.MakeDirs(data_dir) - - training_file_path = os.path.join(data_dir, TRAINING_FILE) - if not tf.gfile.Exists(training_file_path): - _download_and_clean_file(training_file_path, TRAINING_URL) - - eval_file_path = os.path.join(data_dir, EVAL_FILE) - if not tf.gfile.Exists(eval_file_path): - _download_and_clean_file(eval_file_path, EVAL_URL) - - -def build_model_columns(): - """Builds a set of wide and deep feature columns.""" - # Continuous variable columns - age = tf.feature_column.numeric_column('age') - education_num = tf.feature_column.numeric_column('education_num') - capital_gain = tf.feature_column.numeric_column('capital_gain') - capital_loss = tf.feature_column.numeric_column('capital_loss') - hours_per_week = tf.feature_column.numeric_column('hours_per_week') - - education = tf.feature_column.categorical_column_with_vocabulary_list( - 'education', [ - 'Bachelors', 'HS-grad', '11th', 'Masters', '9th', 'Some-college', - 'Assoc-acdm', 'Assoc-voc', '7th-8th', 'Doctorate', 'Prof-school', - '5th-6th', '10th', '1st-4th', 'Preschool', '12th']) - - marital_status = tf.feature_column.categorical_column_with_vocabulary_list( - 'marital_status', [ - 'Married-civ-spouse', 'Divorced', 'Married-spouse-absent', - 'Never-married', 'Separated', 'Married-AF-spouse', 'Widowed']) - - relationship = tf.feature_column.categorical_column_with_vocabulary_list( - 'relationship', [ - 'Husband', 'Not-in-family', 'Wife', 'Own-child', 'Unmarried', - 'Other-relative']) - - workclass = tf.feature_column.categorical_column_with_vocabulary_list( - 'workclass', [ - 'Self-emp-not-inc', 'Private', 'State-gov', 'Federal-gov', - 'Local-gov', '?', 'Self-emp-inc', 'Without-pay', 'Never-worked']) - - # 
To show an example of hashing: - occupation = tf.feature_column.categorical_column_with_hash_bucket( - 'occupation', hash_bucket_size=_HASH_BUCKET_SIZE) - - # Transformations. - age_buckets = tf.feature_column.bucketized_column( - age, boundaries=[18, 25, 30, 35, 40, 45, 50, 55, 60, 65]) - - # Wide columns and deep columns. - base_columns = [ - education, marital_status, relationship, workclass, occupation, - age_buckets, - ] - - crossed_columns = [ - tf.feature_column.crossed_column( - ['education', 'occupation'], hash_bucket_size=_HASH_BUCKET_SIZE), - tf.feature_column.crossed_column( - [age_buckets, 'education', 'occupation'], - hash_bucket_size=_HASH_BUCKET_SIZE), - ] - - wide_columns = base_columns + crossed_columns - - deep_columns = [ - age, - education_num, - capital_gain, - capital_loss, - hours_per_week, - tf.feature_column.indicator_column(workclass), - tf.feature_column.indicator_column(education), - tf.feature_column.indicator_column(marital_status), - tf.feature_column.indicator_column(relationship), - # To show an example of embedding - tf.feature_column.embedding_column(occupation, dimension=8), - ] - - return wide_columns, deep_columns - - -def input_fn(data_file, num_epochs, shuffle, batch_size): - """Generate an input function for the Estimator.""" - assert tf.gfile.Exists(data_file), ( - '%s not found. Please make sure you have run census_dataset.py and ' - 'set the --data_dir argument to the correct path.' % data_file) - - def parse_csv(value): - tf.logging.info('Parsing {}'.format(data_file)) - columns = tf.decode_csv(value, record_defaults=_CSV_COLUMN_DEFAULTS) - features = dict(list(zip(_CSV_COLUMNS, columns))) - labels = features.pop('income_bracket') - classes = tf.equal(labels, '>50K') # binary classification - return features, classes - - # Extract lines from input files using the Dataset API. - dataset = tf.data.TextLineDataset(data_file) - - if shuffle: - dataset = dataset.shuffle(buffer_size=_NUM_EXAMPLES['train']) - - dataset = dataset.map(parse_csv, num_parallel_calls=5) - - # We call repeat after shuffling, rather than before, to prevent separate - # epochs from blending together. - dataset = dataset.repeat(num_epochs) - dataset = dataset.batch(batch_size) - return dataset - - -def define_data_download_flags(): - """Add flags specifying data download arguments.""" - flags.DEFINE_string( - name="data_dir", default="/tmp/census_data/", - help=flags_core.help_wrap( - "Directory to download and extract data.")) - - -def main(_): - download(flags.FLAGS.data_dir) - - -if __name__ == '__main__': - tf.logging.set_verbosity(tf.logging.INFO) - define_data_download_flags() - absl_app.run(main) diff --git a/official/r1/wide_deep/census_main.py b/official/r1/wide_deep/census_main.py deleted file mode 100644 index 39a1610ee..000000000 --- a/official/r1/wide_deep/census_main.py +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================== -"""Train DNN on census income dataset.""" - -import os - -from absl import app as absl_app -from absl import flags -import tensorflow.compat.v1 as tf -from official.r1.utils.logs import logger -from official.r1.wide_deep import census_dataset -from official.r1.wide_deep import wide_deep_run_loop -from official.utils.flags import core as flags_core - - -def define_census_flags(): - wide_deep_run_loop.define_wide_deep_flags() - flags.adopt_module_key_flags(wide_deep_run_loop) - flags_core.set_defaults(data_dir='/tmp/census_data', - model_dir='/tmp/census_model', - train_epochs=40, - epochs_between_evals=2, - inter_op_parallelism_threads=0, - intra_op_parallelism_threads=0, - batch_size=40) - - -def build_estimator(model_dir, model_type, model_column_fn, inter_op, intra_op): - """Build an estimator appropriate for the given model type.""" - wide_columns, deep_columns = model_column_fn() - hidden_units = [100, 75, 50, 25] - - # Create a tf.estimator.RunConfig to ensure the model is run on CPU, which - # trains faster than GPU for this model. - run_config = tf.estimator.RunConfig().replace( - session_config=tf.ConfigProto(device_count={'GPU': 0}, - inter_op_parallelism_threads=inter_op, - intra_op_parallelism_threads=intra_op)) - - if model_type == 'wide': - return tf.estimator.LinearClassifier( - model_dir=model_dir, - feature_columns=wide_columns, - config=run_config) - elif model_type == 'deep': - return tf.estimator.DNNClassifier( - model_dir=model_dir, - feature_columns=deep_columns, - hidden_units=hidden_units, - config=run_config) - else: - return tf.estimator.DNNLinearCombinedClassifier( - model_dir=model_dir, - linear_feature_columns=wide_columns, - dnn_feature_columns=deep_columns, - dnn_hidden_units=hidden_units, - config=run_config) - - -def run_census(flags_obj): - """Construct all necessary functions and call run_loop. - - Args: - flags_obj: Object containing user specified flags. - """ - if flags_obj.download_if_missing: - census_dataset.download(flags_obj.data_dir) - - train_file = os.path.join(flags_obj.data_dir, census_dataset.TRAINING_FILE) - test_file = os.path.join(flags_obj.data_dir, census_dataset.EVAL_FILE) - - # Train and evaluate the model every `flags.epochs_between_evals` epochs. 
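`wide_deep_run_loop.run_loop` is not part of this patch, so the loop the comment above refers to is only sketched here: roughly, it alternates training and evaluation using the input functions defined just below. Flag names are the ones set in `define_census_flags`; hooks, logging, and early stopping are omitted:

```
model = build_estimator(
    model_dir=flags_obj.model_dir,
    model_type=flags_obj.model_type,   # 'wide', 'deep', or the combined default
    model_column_fn=census_dataset.build_model_columns,
    inter_op=flags_obj.inter_op_parallelism_threads,
    intra_op=flags_obj.intra_op_parallelism_threads)

for _ in range(flags_obj.train_epochs // flags_obj.epochs_between_evals):
  model.train(input_fn=train_input_fn)
  results = model.evaluate(input_fn=eval_input_fn)
  print('accuracy: %s' % results['accuracy'])
```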
- def train_input_fn(): - return census_dataset.input_fn( - train_file, flags_obj.epochs_between_evals, True, flags_obj.batch_size) - - def eval_input_fn(): - return census_dataset.input_fn(test_file, 1, False, flags_obj.batch_size) - - tensors_to_log = { - 'average_loss': '{loss_prefix}head/truediv', - 'loss': '{loss_prefix}head/weighted_loss/Sum' - } - - wide_deep_run_loop.run_loop( - name="Census Income", train_input_fn=train_input_fn, - eval_input_fn=eval_input_fn, - model_column_fn=census_dataset.build_model_columns, - build_estimator_fn=build_estimator, - flags_obj=flags_obj, - tensors_to_log=tensors_to_log, - early_stop=True) - - -def main(_): - with logger.benchmark_context(flags.FLAGS): - run_census(flags.FLAGS) - - -if __name__ == '__main__': - tf.logging.set_verbosity(tf.logging.INFO) - define_census_flags() - absl_app.run(main) diff --git a/official/r1/wide_deep/census_test.csv b/official/r1/wide_deep/census_test.csv deleted file mode 100644 index 374397dbd..000000000 --- a/official/r1/wide_deep/census_test.csv +++ /dev/null @@ -1,30 +0,0 @@ -39,State-gov,77516,Bachelors,13,Never-married,Adm-clerical,Not-in-family,,,2174,0,40,,<=50K -50,Self-emp-not-inc,83311,Bachelors,13,Married-civ-spouse,Exec-managerial,Husband,,,0,0,13,,<=50K -38,Private,215646,HS-grad,9,Divorced,Handlers-cleaners,Not-in-family,,,0,0,40,,<=50K -53,Private,234721,11th,7,Married-civ-spouse,Handlers-cleaners,Husband,,,0,0,40,,<=50K -28,Private,338409,Bachelors,13,Married-civ-spouse,Prof-specialty,Wife,,,0,0,40,,<=50K -37,Private,284582,Masters,14,Married-civ-spouse,Exec-managerial,Wife,,,0,0,40,,<=50K -49,Private,160187,9th,5,Married-spouse-absent,Other-service,Not-in-family,,,0,0,16,,<=50K -52,Self-emp-not-inc,209642,HS-grad,9,Married-civ-spouse,Exec-managerial,Husband,,,0,0,45,,>50K -31,Private,45781,Masters,14,Never-married,Prof-specialty,Not-in-family,,,14084,0,50,,>50K -42,Private,159449,Bachelors,13,Married-civ-spouse,Exec-managerial,Husband,,,5178,0,40,,>50K -37,Private,280464,Some-college,10,Married-civ-spouse,Exec-managerial,Husband,,,0,0,80,,>50K -30,State-gov,141297,Bachelors,13,Married-civ-spouse,Prof-specialty,Husband,,,0,0,40,,>50K -23,Private,122272,Bachelors,13,Never-married,Adm-clerical,Own-child,,,0,0,30,,<=50K -32,Private,205019,Assoc-acdm,12,Never-married,Sales,Not-in-family,,,0,0,50,,<=50K -40,Private,121772,Assoc-voc,11,Married-civ-spouse,Craft-repair,Husband,,,0,0,40,,>50K -34,Private,245487,7th-8th,4,Married-civ-spouse,Transport-moving,Husband,,,0,0,45,,<=50K -25,Self-emp-not-inc,176756,HS-grad,9,Never-married,Farming-fishing,Own-child,,,0,0,35,,<=50K -32,Private,186824,HS-grad,9,Never-married,Machine-op-inspct,Unmarried,,,0,0,40,,<=50K -38,Private,28887,11th,7,Married-civ-spouse,Sales,Husband,,,0,0,50,,<=50K -43,Self-emp-not-inc,292175,Masters,14,Divorced,Exec-managerial,Unmarried,,,0,0,45,,>50K -40,Private,193524,Doctorate,16,Married-civ-spouse,Prof-specialty,Husband,,,0,0,60,,>50K -56,Local-gov,216851,Bachelors,13,Married-civ-spouse,Tech-support,Husband,,,0,0,40,,>50K -54,?,180211,Some-college,10,Married-civ-spouse,?,Husband,,,0,0,60,,>50K -22,State-gov,311512,Some-college,10,Married-civ-spouse,Other-service,Husband,,,0,0,15,,<=50K -31,Private,84154,Some-college,10,Married-civ-spouse,Sales,Husband,,,0,0,38,,>50K -57,Federal-gov,337895,Bachelors,13,Married-civ-spouse,Prof-specialty,Husband,,,0,0,40,,>50K -47,Private,51835,Prof-school,15,Married-civ-spouse,Prof-specialty,Wife,,,0,1902,60,,>50K -50,Federal-gov,251585,Bachelors,13,Divorced,Exec-managerial,Not-in-family,,,0,0,55,,>50K 
-25,Private,289980,HS-grad,9,Never-married,Handlers-cleaners,Not-in-family,,,0,0,35,,<=50K -42,Private,116632,Doctorate,16,Married-civ-spouse,Prof-specialty,Husband,,,0,0,45,,>50K diff --git a/official/r1/wide_deep/census_test.py b/official/r1/wide_deep/census_test.py deleted file mode 100644 index 811651560..000000000 --- a/official/r1/wide_deep/census_test.py +++ /dev/null @@ -1,163 +0,0 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import os - -from absl import logging -import tensorflow.compat.v1 as tf - -from official.r1.wide_deep import census_dataset -from official.r1.wide_deep import census_main -from official.utils.testing import integration - -logging.set_verbosity(logging.ERROR) - -TEST_INPUT = ('18,Self-emp-not-inc,987,Bachelors,12,Married-civ-spouse,abc,' - 'Husband,zyx,wvu,34,56,78,tsr,<=50K') - -TEST_INPUT_VALUES = { - 'age': 18, - 'education_num': 12, - 'capital_gain': 34, - 'capital_loss': 56, - 'hours_per_week': 78, - 'education': 'Bachelors', - 'marital_status': 'Married-civ-spouse', - 'relationship': 'Husband', - 'workclass': 'Self-emp-not-inc', - 'occupation': 'abc', -} - -TEST_CSV = os.path.join(os.path.dirname(__file__), 'census_test.csv') - - -class BaseTest(tf.test.TestCase): - """Tests for Wide Deep model.""" - - @classmethod - def setUpClass(cls): # pylint: disable=invalid-name - super(BaseTest, cls).setUpClass() - census_main.define_census_flags() - - def setUp(self): - # Create temporary CSV file - self.temp_dir = self.get_temp_dir() - self.input_csv = os.path.join(self.temp_dir, 'test.csv') - with tf.io.gfile.GFile(self.input_csv, 'w') as temp_csv: - temp_csv.write(TEST_INPUT) - - with tf.io.gfile.GFile(TEST_CSV, 'r') as temp_csv: - test_csv_contents = temp_csv.read() - - # Used for end-to-end tests. - for fname in [census_dataset.TRAINING_FILE, census_dataset.EVAL_FILE]: - with tf.io.gfile.GFile( - os.path.join(self.temp_dir, fname), 'w') as test_csv: - test_csv.write(test_csv_contents) - - def test_input_fn(self): - dataset = census_dataset.input_fn(self.input_csv, 1, False, 1) - features, labels = dataset.make_one_shot_iterator().get_next() - - with self.test_session() as sess: - features, labels = sess.run((features, labels)) - - # Compare the two features dictionaries. - for key in TEST_INPUT_VALUES: - self.assertTrue(key in features) - self.assertEqual(len(features[key]), 1) - feature_value = features[key][0] - - # Convert from bytes to string for Python 3. 
- if isinstance(feature_value, bytes): - feature_value = feature_value.decode() - - self.assertEqual(TEST_INPUT_VALUES[key], feature_value) - - self.assertFalse(labels) - - def build_and_test_estimator(self, model_type): - """Ensure that model trains and minimizes loss.""" - model = census_main.build_estimator( - self.temp_dir, model_type, - model_column_fn=census_dataset.build_model_columns, - inter_op=0, intra_op=0) - - # Train for 1 step to initialize model and evaluate initial loss - def get_input_fn(num_epochs, shuffle, batch_size): - def input_fn(): - return census_dataset.input_fn( - TEST_CSV, num_epochs=num_epochs, shuffle=shuffle, - batch_size=batch_size) - return input_fn - - model.train(input_fn=get_input_fn(1, True, 1), steps=1) - initial_results = model.evaluate(input_fn=get_input_fn(1, False, 1)) - - # Train for 100 epochs at batch size 3 and evaluate final loss - model.train(input_fn=get_input_fn(100, True, 3)) - final_results = model.evaluate(input_fn=get_input_fn(1, False, 1)) - - print('%s initial results:' % model_type, initial_results) - print('%s final results:' % model_type, final_results) - - # Ensure loss has decreased, while accuracy and both AUCs have increased. - self.assertLess(final_results['loss'], initial_results['loss']) - self.assertGreater(final_results['auc'], initial_results['auc']) - self.assertGreater(final_results['auc_precision_recall'], - initial_results['auc_precision_recall']) - self.assertGreater(final_results['accuracy'], initial_results['accuracy']) - - def test_wide_deep_estimator_training(self): - self.build_and_test_estimator('wide_deep') - - def test_end_to_end_wide(self): - integration.run_synthetic( - main=census_main.main, tmp_root=self.get_temp_dir(), - extra_flags=[ - '--data_dir', self.get_temp_dir(), - '--model_type', 'wide', - '--download_if_missing=false' - ], - synth=False) - - def test_end_to_end_deep(self): - integration.run_synthetic( - main=census_main.main, tmp_root=self.get_temp_dir(), - extra_flags=[ - '--data_dir', self.get_temp_dir(), - '--model_type', 'deep', - '--download_if_missing=false' - ], - synth=False) - - def test_end_to_end_wide_deep(self): - integration.run_synthetic( - main=census_main.main, tmp_root=self.get_temp_dir(), - extra_flags=[ - '--data_dir', self.get_temp_dir(), - '--model_type', 'wide_deep', - '--download_if_missing=false' - ], - synth=False) - - -if __name__ == '__main__': - tf.disable_eager_execution() - tf.test.main() diff --git a/official/r1/wide_deep/movielens_dataset.py b/official/r1/wide_deep/movielens_dataset.py deleted file mode 100644 index 676062cbb..000000000 --- a/official/r1/wide_deep/movielens_dataset.py +++ /dev/null @@ -1,165 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================== -"""Prepare MovieLens dataset for wide-deep.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import functools -import os - -# pylint: disable=wrong-import-order -from absl import app as absl_app -from absl import flags -import numpy as np -import tensorflow.compat.v1 as tf -# pylint: enable=wrong-import-order - -from official.recommendation import movielens -from official.r1.utils.data import file_io -from official.utils.flags import core as flags_core - - -_BUFFER_SUBDIR = "wide_deep_buffer" -_FEATURE_MAP = { - movielens.USER_COLUMN: tf.compat.v1.FixedLenFeature([1], dtype=tf.int64), - movielens.ITEM_COLUMN: tf.compat.v1.FixedLenFeature([1], dtype=tf.int64), - movielens.TIMESTAMP_COLUMN: tf.compat.v1.FixedLenFeature([1], - dtype=tf.int64), - movielens.GENRE_COLUMN: tf.compat.v1.FixedLenFeature( - [movielens.N_GENRE], dtype=tf.int64), - movielens.RATING_COLUMN: tf.compat.v1.FixedLenFeature([1], - dtype=tf.float32), -} - -_BUFFER_SIZE = { - movielens.ML_1M: {"train": 107978119, "eval": 26994538}, - movielens.ML_20M: {"train": 2175203810, "eval": 543802008} -} - -_USER_EMBEDDING_DIM = 16 -_ITEM_EMBEDDING_DIM = 64 - -def build_model_columns(dataset): - """Builds a set of wide and deep feature columns.""" - user_id = tf.feature_column.categorical_column_with_vocabulary_list( - movielens.USER_COLUMN, range(1, movielens.NUM_USER_IDS[dataset])) - user_embedding = tf.feature_column.embedding_column( - user_id, _USER_EMBEDDING_DIM, max_norm=np.sqrt(_USER_EMBEDDING_DIM)) - - item_id = tf.feature_column.categorical_column_with_vocabulary_list( - movielens.ITEM_COLUMN, range(1, movielens.NUM_ITEM_IDS)) - item_embedding = tf.feature_column.embedding_column( - item_id, _ITEM_EMBEDDING_DIM, max_norm=np.sqrt(_ITEM_EMBEDDING_DIM)) - - time = tf.feature_column.numeric_column(movielens.TIMESTAMP_COLUMN) - genres = tf.feature_column.numeric_column( - movielens.GENRE_COLUMN, shape=(movielens.N_GENRE,), dtype=tf.uint8) - - deep_columns = [user_embedding, item_embedding, time, genres] - wide_columns = [] - - return wide_columns, deep_columns - - -def _deserialize(examples_serialized): - features = tf.parse_example(examples_serialized, _FEATURE_MAP) - return features, features[movielens.RATING_COLUMN] / movielens.MAX_RATING - - -def _buffer_path(data_dir, dataset, name): - return os.path.join(data_dir, _BUFFER_SUBDIR, - "{}_{}_buffer".format(dataset, name)) - - -def _df_to_input_fn(df, name, dataset, data_dir, batch_size, repeat, shuffle): - """Serialize a dataframe and write it to a buffer file.""" - buffer_path = _buffer_path(data_dir, dataset, name) - expected_size = _BUFFER_SIZE[dataset].get(name) - - file_io.write_to_buffer( - dataframe=df, buffer_path=buffer_path, - columns=list(_FEATURE_MAP.keys()), expected_size=expected_size) - - def input_fn(): - dataset = tf.data.TFRecordDataset(buffer_path) - # batch comes before map because map can deserialize multiple examples. 
- dataset = dataset.batch(batch_size) - dataset = dataset.map(_deserialize, num_parallel_calls=16) - if shuffle: - dataset = dataset.shuffle(shuffle) - - dataset = dataset.repeat(repeat) - return dataset.prefetch(1) - - return input_fn - - -def _check_buffers(data_dir, dataset): - train_path = os.path.join(data_dir, _BUFFER_SUBDIR, - "{}_{}_buffer".format(dataset, "train")) - eval_path = os.path.join(data_dir, _BUFFER_SUBDIR, - "{}_{}_buffer".format(dataset, "eval")) - - if not tf.gfile.Exists(train_path) or not tf.gfile.Exists(eval_path): - return False - - return all([ - tf.gfile.Stat(_buffer_path(data_dir, dataset, "train")).length == - _BUFFER_SIZE[dataset]["train"], - tf.gfile.Stat(_buffer_path(data_dir, dataset, "eval")).length == - _BUFFER_SIZE[dataset]["eval"], - ]) - - -def construct_input_fns(dataset, data_dir, batch_size=16, repeat=1): - """Construct train and test input functions, as well as the column fn.""" - if _check_buffers(data_dir, dataset): - train_df, eval_df = None, None - else: - df = movielens.csv_to_joint_dataframe(dataset=dataset, data_dir=data_dir) - df = movielens.integerize_genres(dataframe=df) - df = df.drop(columns=[movielens.TITLE_COLUMN]) - - train_df = df.sample(frac=0.8, random_state=0) - eval_df = df.drop(train_df.index) - - train_df = train_df.reset_index(drop=True) - eval_df = eval_df.reset_index(drop=True) - - train_input_fn = _df_to_input_fn( - df=train_df, name="train", dataset=dataset, data_dir=data_dir, - batch_size=batch_size, repeat=repeat, - shuffle=movielens.NUM_RATINGS[dataset]) - eval_input_fn = _df_to_input_fn( - df=eval_df, name="eval", dataset=dataset, data_dir=data_dir, - batch_size=batch_size, repeat=repeat, shuffle=None) - model_column_fn = functools.partial(build_model_columns, dataset=dataset) - - train_input_fn() - return train_input_fn, eval_input_fn, model_column_fn - - -def main(_): - movielens.download(dataset=flags.FLAGS.dataset, data_dir=flags.FLAGS.data_dir) - construct_input_fns(flags.FLAGS.dataset, flags.FLAGS.data_dir) - -if __name__ == "__main__": - tf.logging.set_verbosity(tf.logging.INFO) - movielens.define_data_download_flags() - flags.adopt_module_key_flags(movielens) - flags_core.set_defaults(dataset="ml-1m") - absl_app.run(main) diff --git a/official/r1/wide_deep/movielens_main.py b/official/r1/wide_deep/movielens_main.py deleted file mode 100644 index 45f7453c7..000000000 --- a/official/r1/wide_deep/movielens_main.py +++ /dev/null @@ -1,114 +0,0 @@ -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
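The batch-before-map ordering in `_df_to_input_fn` above works because `tf.io.parse_example` is vectorized: one call can deserialize an entire batch of serialized `tf.train.Example` protos, which is cheaper than parsing records one at a time. A minimal sketch of that pattern with a single made-up feature (not the real MovieLens feature map):

```python
import tensorflow as tf

def make_record(user_id):
  # Serialize a toy tf.train.Example with one int64 feature.
  feature = {"user_id": tf.train.Feature(
      int64_list=tf.train.Int64List(value=[user_id]))}
  return tf.train.Example(
      features=tf.train.Features(feature=feature)).SerializeToString()

serialized = [make_record(i) for i in range(8)]
feature_map = {"user_id": tf.io.FixedLenFeature([1], dtype=tf.int64)}

ds = tf.data.Dataset.from_tensor_slices(serialized)
ds = ds.batch(4)  # batch the raw serialized strings first ...
# ... then deserialize each batch in a single vectorized parse call.
ds = ds.map(lambda batch: tf.io.parse_example(batch, feature_map))

for features in ds:
  print(features["user_id"].numpy().ravel())  # [0 1 2 3] then [4 5 6 7]
```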
-# ============================================================================== -"""Train DNN on Kaggle movie dataset.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import os - -from absl import app as absl_app -from absl import flags -import tensorflow.compat.v1 as tf -from official.r1.utils.logs import logger -from official.r1.wide_deep import movielens_dataset -from official.r1.wide_deep import wide_deep_run_loop -from official.recommendation import movielens -from official.utils.flags import core as flags_core - - -def define_movie_flags(): - """Define flags for movie dataset training.""" - wide_deep_run_loop.define_wide_deep_flags() - flags.DEFINE_enum( - name="dataset", default=movielens.ML_1M, - enum_values=movielens.DATASETS, case_sensitive=False, - help=flags_core.help_wrap("Dataset to be trained and evaluated.")) - flags.adopt_module_key_flags(wide_deep_run_loop) - flags_core.set_defaults(data_dir="/tmp/movielens-data/", - model_dir='/tmp/movie_model', - model_type="deep", - train_epochs=50, - epochs_between_evals=5, - inter_op_parallelism_threads=0, - intra_op_parallelism_threads=0, - batch_size=256) - - @flags.validator("stop_threshold", - message="stop_threshold not supported for movielens model") - def _no_stop(stop_threshold): - return stop_threshold is None - - -def build_estimator(model_dir, model_type, model_column_fn, inter_op, intra_op): - """Build an estimator appropriate for the given model type.""" - if model_type != "deep": - raise NotImplementedError("movie dataset only supports `deep` model_type") - _, deep_columns = model_column_fn() - hidden_units = [256, 256, 256, 128] - - run_config = tf.estimator.RunConfig().replace( - session_config=tf.ConfigProto(device_count={'GPU': 0}, - inter_op_parallelism_threads=inter_op, - intra_op_parallelism_threads=intra_op)) - return tf.estimator.DNNRegressor( - model_dir=model_dir, - feature_columns=deep_columns, - hidden_units=hidden_units, - optimizer=tf.compat.v1.train.AdamOptimizer(), - activation_fn=tf.nn.sigmoid, - dropout=0.3, - loss_reduction=tf.losses.Reduction.MEAN) - - -def run_movie(flags_obj): - """Construct all necessary functions and call run_loop. - - Args: - flags_obj: Object containing user specified flags. - """ - - if flags_obj.download_if_missing: - movielens.download(dataset=flags_obj.dataset, data_dir=flags_obj.data_dir) - - train_input_fn, eval_input_fn, model_column_fn = \ - movielens_dataset.construct_input_fns( - dataset=flags_obj.dataset, data_dir=flags_obj.data_dir, - batch_size=flags_obj.batch_size, repeat=flags_obj.epochs_between_evals) - - tensors_to_log = { - 'loss': '{loss_prefix}head/weighted_loss/value' - } - - wide_deep_run_loop.run_loop( - name="MovieLens", train_input_fn=train_input_fn, - eval_input_fn=eval_input_fn, - model_column_fn=model_column_fn, - build_estimator_fn=build_estimator, - flags_obj=flags_obj, - tensors_to_log=tensors_to_log, - early_stop=False) - - -def main(_): - with logger.benchmark_context(flags.FLAGS): - run_movie(flags.FLAGS) - - -if __name__ == '__main__': - tf.logging.set_verbosity(tf.logging.INFO) - define_movie_flags() - absl_app.run(main) diff --git a/official/r1/wide_deep/wide_deep_run_loop.py b/official/r1/wide_deep/wide_deep_run_loop.py deleted file mode 100644 index d81bfc853..000000000 --- a/official/r1/wide_deep/wide_deep_run_loop.py +++ /dev/null @@ -1,133 +0,0 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. 
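The `@flags.validator` decorator used in `define_movie_flags` above registers a check that absl runs when flags are parsed; returning `False` makes the program abort with the given message. A small standalone sketch of the same pattern (the flag name is reused purely for illustration):

```python
from absl import app
from absl import flags

FLAGS = flags.FLAGS
flags.DEFINE_float("stop_threshold", None, "Not supported for this model.")

@flags.validator("stop_threshold",
                 message="stop_threshold not supported for movielens model")
def _no_stop(stop_threshold):
  # Validation passes only when the flag was left unset.
  return stop_threshold is None

def main(_):
  print("stop_threshold:", FLAGS.stop_threshold)

if __name__ == "__main__":
  app.run(main)  # running with --stop_threshold=0.9 exits with the message above
```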
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Core run logic for TensorFlow Wide & Deep Tutorial using tf.estimator API.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import os -import shutil - -from absl import app as absl_app -from absl import flags -import tensorflow.compat.v1 as tf - -from official.r1.utils.logs import hooks_helper -from official.r1.utils.logs import logger -from official.utils.flags import core as flags_core -from official.utils.misc import model_helpers - - -LOSS_PREFIX = {'wide': 'linear/', 'deep': 'dnn/'} - - -def define_wide_deep_flags(): - """Add supervised learning flags, as well as wide-deep model type.""" - flags_core.define_base(clean=True, train_epochs=True, - epochs_between_evals=True, stop_threshold=True, - hooks=True, export_dir=True) - flags_core.define_benchmark() - flags_core.define_performance( - num_parallel_calls=False, inter_op=True, intra_op=True, - synthetic_data=False, max_train_steps=False, dtype=False, - all_reduce_alg=False) - - flags.adopt_module_key_flags(flags_core) - - flags.DEFINE_enum( - name="model_type", short_name="mt", default="wide_deep", - enum_values=['wide', 'deep', 'wide_deep'], - help="Select model topology.") - flags.DEFINE_boolean( - name="download_if_missing", default=True, help=flags_core.help_wrap( - "Download data to data_dir if it is not already present.")) - - -def export_model(model, model_type, export_dir, model_column_fn): - """Export to SavedModel format. - - Args: - model: Estimator object - model_type: string indicating model type. "wide", "deep" or "wide_deep" - export_dir: directory to export the model. - model_column_fn: Function to generate model feature columns. 
- """ - wide_columns, deep_columns = model_column_fn() - if model_type == 'wide': - columns = wide_columns - elif model_type == 'deep': - columns = deep_columns - else: - columns = wide_columns + deep_columns - feature_spec = tf.feature_column.make_parse_example_spec(columns) - example_input_fn = ( - tf.estimator.export.build_parsing_serving_input_receiver_fn(feature_spec)) - model.export_savedmodel(export_dir, example_input_fn, - strip_default_attrs=True) - - -def run_loop(name, train_input_fn, eval_input_fn, model_column_fn, - build_estimator_fn, flags_obj, tensors_to_log, early_stop=False): - """Define training loop.""" - model_helpers.apply_clean(flags.FLAGS) - model = build_estimator_fn( - model_dir=flags_obj.model_dir, model_type=flags_obj.model_type, - model_column_fn=model_column_fn, - inter_op=flags_obj.inter_op_parallelism_threads, - intra_op=flags_obj.intra_op_parallelism_threads) - - run_params = { - 'batch_size': flags_obj.batch_size, - 'train_epochs': flags_obj.train_epochs, - 'model_type': flags_obj.model_type, - } - - benchmark_logger = logger.get_benchmark_logger() - benchmark_logger.log_run_info('wide_deep', name, run_params, - test_id=flags_obj.benchmark_test_id) - - loss_prefix = LOSS_PREFIX.get(flags_obj.model_type, '') - tensors_to_log = {k: v.format(loss_prefix=loss_prefix) - for k, v in tensors_to_log.items()} - train_hooks = hooks_helper.get_train_hooks( - flags_obj.hooks, model_dir=flags_obj.model_dir, - batch_size=flags_obj.batch_size, tensors_to_log=tensors_to_log) - - # Train and evaluate the model every `flags.epochs_between_evals` epochs. - for n in range(flags_obj.train_epochs // flags_obj.epochs_between_evals): - model.train(input_fn=train_input_fn, hooks=train_hooks) - - results = model.evaluate(input_fn=eval_input_fn) - - # Display evaluation metrics - tf.logging.info('Results at epoch %d / %d', - (n + 1) * flags_obj.epochs_between_evals, - flags_obj.train_epochs) - tf.logging.info('-' * 60) - - for key in sorted(results): - tf.logging.info('%s: %s' % (key, results[key])) - - benchmark_logger.log_evaluation_result(results) - - if early_stop and model_helpers.past_stop_threshold( - flags_obj.stop_threshold, results['accuracy']): - break - - # Export the model - if flags_obj.export_dir is not None: - export_model(model, flags_obj.model_type, flags_obj.export_dir, - model_column_fn) -- GitLab From ff5bd71285709a2e880debb978841c3661c01997 Mon Sep 17 00:00:00 2001 From: Jaeyoun Kim Date: Mon, 22 Jun 2020 23:12:03 -0700 Subject: [PATCH 46/79] Update README.md Add a milestone for the Object Detection API --- README.md | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index d4eab2a22..5b52e4a5c 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,8 @@ # Welcome to the Model Garden for TensorFlow -The TensorFlow Model Garden is a repository with a number of different implementations of state-of-the-art (SOTA) models and modeling solutions for TensorFlow users. We aim to demonstrate the best practices for modeling so that TensorFlow users can take full advantage of TensorFlow for their research and product development. +The TensorFlow Model Garden is a repository with a number of different implementations of state-of-the-art (SOTA) models and modeling solutions for TensorFlow users. We aim to demonstrate the best practices for modeling so that TensorFlow users +can take full advantage of TensorFlow for their research and product development. 
| Directory | Description | |-----------|-------------| @@ -10,7 +11,7 @@ The TensorFlow Model Garden is a repository with a number of different implement | [research](research) | • A collection of research model implementations in TensorFlow 1 or 2 by researchers
    • Maintained and supported by researchers | | [community](community) | • A curated list of the GitHub repositories with machine learning models and implementations powered by TensorFlow 2 | -## [Announcements](../../wiki/Announcements) +## [Announcements](https://github.com/tensorflow/models/wiki/Announcements) | Date | News | |------|------| @@ -21,11 +22,17 @@ The TensorFlow Model Garden is a repository with a number of different implement | May 1, 2020 | [DELF: DEep Local Features](https://github.com/tensorflow/models/tree/master/research/delf) updated to support TensorFlow 2.1 | March 31, 2020 | [Introducing the Model Garden for TensorFlow 2](https://blog.tensorflow.org/2020/03/introducing-model-garden-for-tensorflow-2.html) ([Tweet](https://twitter.com/TensorFlow/status/1245029834633297921)) | +## [Milestones](https://github.com/tensorflow/models/milestones) + +| Date | Milestone | +|------|-----------| +| July 7, 2020 | [![GitHub milestone](https://img.shields.io/github/milestones/progress/tensorflow/models/1)](https://github.com/tensorflow/models/milestone/1) | + ## Contributions [![help wanted:paper implementation](https://img.shields.io/github/issues/tensorflow/models/help%20wanted%3Apaper%20implementation)](https://github.com/tensorflow/models/labels/help%20wanted%3Apaper%20implementation) -If you want to contribute, please review the [contribution guidelines](../../wiki/How-to-contribute). +If you want to contribute, please review the [contribution guidelines](https://github.com/tensorflow/models/wiki/How-to-contribute). ## License -- GitLab From b9a87a522448a53a03c9a4431e43544f3cdaee1b Mon Sep 17 00:00:00 2001 From: ian-cannon <50238327+ian-cannon@users.noreply.github.com> Date: Tue, 23 Jun 2020 11:18:47 -0400 Subject: [PATCH 47/79] Update agent.py (#8722) Update agent.py according to changes noted https://github.com/tensorflow/models/issues/7719 --- research/efficient-hrl/agent.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/research/efficient-hrl/agent.py b/research/efficient-hrl/agent.py index cb02b51fa..0028ddffa 100644 --- a/research/efficient-hrl/agent.py +++ b/research/efficient-hrl/agent.py @@ -149,7 +149,7 @@ class UvfAgentCore(object): error = tf.square(actions - pred_actions) spec_range = (self._action_spec.maximum - self._action_spec.minimum) / 2 - normalized_error = error / tf.constant(spec_range) ** 2 + normalized_error = tf.cast(error, tf.float64) / tf.constant(spec_range) ** 2 return -normalized_error @gin.configurable('uvf_add_noise_fn') -- GitLab From ee35a03041b642a2c1e4e5e528e48c0ab0a8afd6 Mon Sep 17 00:00:00 2001 From: "A. Unique TensorFlower" Date: Tue, 23 Jun 2020 10:44:31 -0700 Subject: [PATCH 48/79] Update readme with ShapeMask instructions. 
PiperOrigin-RevId: 317896878 --- official/README.md | 2 + official/vision/detection/README.md | 138 +++++++++++++++++++++++++++- 2 files changed, 136 insertions(+), 4 deletions(-) diff --git a/official/README.md b/official/README.md index 84fd2e634..2b3f2dd76 100644 --- a/official/README.md +++ b/official/README.md @@ -43,6 +43,7 @@ In the near future, we will add: |-------|-------------------| | [MNIST](vision/image_classification) | A basic model to classify digits from the [MNIST dataset](http://yann.lecun.com/exdb/mnist/) | | [ResNet](vision/image_classification) | [Deep Residual Learning for Image Recognition](https://arxiv.org/abs/1512.03385) | +| [EfficientNet](vision/image_classification) | [EfficientNet: Rethinking Model Scaling for Convolutional Neural Networks](https://arxiv.org/abs/1905.11946) | #### Object Detection and Segmentation @@ -50,6 +51,7 @@ In the near future, we will add: |-------|-------------------| | [RetinaNet](vision/detection) | [Focal Loss for Dense Object Detection](https://arxiv.org/abs/1708.02002) | | [Mask R-CNN](vision/detection) | [Mask R-CNN](https://arxiv.org/abs/1703.06870) | +| [ShapeMask](vision/detection) | [ShapeMask: Learning to Segment Novel Objects by Refining Shape Priors](https://arxiv.org/abs/1904.03239) | ### Natural Language Processing diff --git a/official/vision/detection/README.md b/official/vision/detection/README.md index 363e2f6c4..53134ec55 100644 --- a/official/vision/detection/README.md +++ b/official/vision/detection/README.md @@ -123,8 +123,6 @@ predict: predict_batch_size: 8 architecture: use_bfloat16: False -retinanet_parser: - use_bfloat16: False train: total_steps: 1 batch_size: 8 @@ -245,8 +243,6 @@ predict: predict_batch_size: 8 architecture: use_bfloat16: False -maskrcnn_parser: - use_bfloat16: False train: total_steps: 1000 batch_size: 8 @@ -255,6 +251,140 @@ use_tpu: False " ``` +## Train ShapeMask on TPU + +### Train a ResNet-50 based ShapeMask. + +```bash +TPU_NAME="" +MODEL_DIR="" +RESNET_CHECKPOINT="" +TRAIN_FILE_PATTERN="" +EVAL_FILE_PATTERN="" +VAL_JSON_FILE="" +SHAPE_PRIOR_PATH="" +python3 ~/models/official/vision/detection/main.py \ + --strategy_type=tpu \ + --tpu=${TPU_NAME} \ + --model_dir=${MODEL_DIR} \ + --mode=train \ + --model=shapemask \ + --params_override="{train: { checkpoint: { path: ${RESNET_CHECKPOINT}, prefix: resnet50/ }, train_file_pattern: ${TRAIN_FILE_PATTERN} }, eval: { val_json_file: ${VAL_JSON_FILE}, eval_file_pattern: ${EVAL_FILE_PATTERN} } shapemask_head: {use_category_for_mask: true, shape_prior_path: ${SHAPE_PRIOR_PATH}} }" +``` + +The pre-trained ResNet-50 checkpoint can be downloaded [here](https://storage.cloud.google.com/cloud-tpu-checkpoints/model-garden-vision/detection/resnet50-2018-02-07.tar.gz). + +The shape priors can be downloaded [here] +(https://storage.googleapis.com/cloud-tpu-checkpoints/shapemask/kmeans_class_priors_91x20x32x32.npy) + + +### Train a custom ShapeMask using the config file. + +First, create a YAML config file, e.g. *my_shapemask.yaml*. +This file specifies the parameters to be overridden: + +```YAML +# my_shapemask.yaml +train: + train_file_pattern: + total_steps: + batch_size: +eval: + eval_file_pattern: + val_json_file: + batch_size: +shapemask_head: + shape_prior_path: +``` + +Once the YAML config file is created, you can launch the training using the +following command. 
+ +```bash +TPU_NAME="" +MODEL_DIR="" +python3 ~/models/official/vision/detection/main.py \ + --strategy_type=tpu \ + --tpu=${TPU_NAME} \ + --model_dir=${MODEL_DIR} \ + --mode=train \ + --model=shapemask \ + --config_file="my_shapemask.yaml" +``` + +## Train ShapeMask on GPU + +Training on GPU is similar to that on TPU. The major change is the strategy type +(use +"[mirrored](https://www.tensorflow.org/api_docs/python/tf/distribute/MirroredStrategy)" +for multiple GPU and +"[one_device](https://www.tensorflow.org/api_docs/python/tf/distribute/OneDeviceStrategy)" +for single GPU). + +Multi-GPUs example (assuming there are 8GPU connected to the host): + +```bash +MODEL_DIR="" +python3 ~/models/official/vision/detection/main.py \ + --strategy_type=mirrored \ + --num_gpus=8 \ + --model_dir=${MODEL_DIR} \ + --mode=train \ + --model=shapemask \ + --config_file="my_shapemask.yaml" +``` + +A single GPU example + +```bash +MODEL_DIR="" +python3 ~/models/official/vision/detection/main.py \ + --strategy_type=one_device \ + --num_gpus=1 \ + --model_dir=${MODEL_DIR} \ + --mode=train \ + --model=shapemask \ + --config_file="my_shapemask.yaml" +``` + + +An example with inline configuration (YAML or JSON format): + +``` +python3 ~/models/official/vision/detection/main.py \ + --model_dir= \ + --strategy_type=one_device \ + --num_gpus=1 \ + --mode=train \ + --model=shapemask \ + --params_override="eval: + eval_file_pattern: + batch_size: 8 + val_json_file: +train: + total_steps: 1000 + batch_size: 8 + train_file_pattern: +use_tpu: False +" +``` + + +### Run the evaluation (after training) + +``` +python3 /usr/share/models/official/vision/detection/main.py \ + --strategy_type=tpu \ + --tpu=${TPU_NAME} \ + --model_dir=${MODEL_DIR} \ + --mode=eval \ + --model=shapemask \ + --params_override="{eval: { val_json_file: ${VAL_JSON_FILE}, eval_file_pattern: ${EVAL_FILE_PATTERN}, eval_samples: 5000 } }" +``` + +`MODEL_DIR` needs to point to the trained path of ShapeMask model. +Change `strategy_type=mirrored` and `num_gpus=1` to run on a GPU. + Note: The JSON groundtruth file is useful for [COCO dataset](http://cocodataset.org/#home) and can be downloaded from the [COCO website](http://cocodataset.org/#download). For custom dataset, it is unncessary because the groundtruth can be included in the TFRecord files. -- GitLab From 67996f8777080cf990162c356377cf935d03aefd Mon Sep 17 00:00:00 2001 From: "A. 
Unique TensorFlower" Date: Tue, 23 Jun 2020 10:53:28 -0700 Subject: [PATCH 49/79] Internal change PiperOrigin-RevId: 317898942 --- official/nlp/bert/input_pipeline.py | 36 +++++ official/nlp/data/classifier_data_lib.py | 26 ++- official/nlp/data/create_finetuning_data.py | 45 +++++- official/nlp/data/sentence_retrieval_lib.py | 168 ++++++++++++++++++++ 4 files changed, 268 insertions(+), 7 deletions(-) create mode 100644 official/nlp/data/sentence_retrieval_lib.py diff --git a/official/nlp/bert/input_pipeline.py b/official/nlp/bert/input_pipeline.py index 73c2a096e..ed3fd173d 100644 --- a/official/nlp/bert/input_pipeline.py +++ b/official/nlp/bert/input_pipeline.py @@ -247,3 +247,39 @@ def create_squad_dataset(file_path, dataset = dataset.batch(batch_size, drop_remainder=True) dataset = dataset.prefetch(tf.data.experimental.AUTOTUNE) return dataset + + +def create_retrieval_dataset(file_path, + seq_length, + batch_size, + input_pipeline_context=None): + """Creates input dataset from (tf)records files for scoring.""" + name_to_features = { + 'input_ids': tf.io.FixedLenFeature([seq_length], tf.int64), + 'input_mask': tf.io.FixedLenFeature([seq_length], tf.int64), + 'segment_ids': tf.io.FixedLenFeature([seq_length], tf.int64), + 'int_iden': tf.io.FixedLenFeature([1], tf.int64), + } + dataset = single_file_dataset(file_path, name_to_features) + + # The dataset is always sharded by number of hosts. + # num_input_pipelines is the number of hosts rather than number of cores. + if input_pipeline_context and input_pipeline_context.num_input_pipelines > 1: + dataset = dataset.shard(input_pipeline_context.num_input_pipelines, + input_pipeline_context.input_pipeline_id) + + def _select_data_from_record(record): + x = { + 'input_word_ids': record['input_ids'], + 'input_mask': record['input_mask'], + 'input_type_ids': record['segment_ids'] + } + y = record['int_iden'] + return (x, y) + + dataset = dataset.map( + _select_data_from_record, + num_parallel_calls=tf.data.experimental.AUTOTUNE) + dataset = dataset.batch(batch_size, drop_remainder=False) + dataset = dataset.prefetch(tf.data.experimental.AUTOTUNE) + return dataset diff --git a/official/nlp/data/classifier_data_lib.py b/official/nlp/data/classifier_data_lib.py index 462501a59..67c47d787 100644 --- a/official/nlp/data/classifier_data_lib.py +++ b/official/nlp/data/classifier_data_lib.py @@ -33,7 +33,13 @@ from official.nlp.bert import tokenization class InputExample(object): """A single training/test example for simple sequence classification.""" - def __init__(self, guid, text_a, text_b=None, label=None, weight=None): + def __init__(self, + guid, + text_a, + text_b=None, + label=None, + weight=None, + int_iden=None): """Constructs a InputExample. Args: @@ -46,12 +52,15 @@ class InputExample(object): specified for train and dev examples, but not for test examples. weight: (Optional) float. The weight of the example to be used during training. + int_iden: (Optional) int. The int identification number of example in the + corpus. 
""" self.guid = guid self.text_a = text_a self.text_b = text_b self.label = label self.weight = weight + self.int_iden = int_iden class InputFeatures(object): @@ -63,13 +72,15 @@ class InputFeatures(object): segment_ids, label_id, is_real_example=True, - weight=None): + weight=None, + int_iden=None): self.input_ids = input_ids self.input_mask = input_mask self.segment_ids = segment_ids self.label_id = label_id self.is_real_example = is_real_example self.weight = weight + self.int_iden = int_iden class DataProcessor(object): @@ -908,8 +919,9 @@ def convert_single_example(ex_index, example, label_list, max_seq_length, logging.info("input_ids: %s", " ".join([str(x) for x in input_ids])) logging.info("input_mask: %s", " ".join([str(x) for x in input_mask])) logging.info("segment_ids: %s", " ".join([str(x) for x in segment_ids])) - logging.info("label: %s (id = %d)", example.label, label_id) + logging.info("label: %s (id = %s)", example.label, str(label_id)) logging.info("weight: %s", example.weight) + logging.info("int_iden: %s", str(example.int_iden)) feature = InputFeatures( input_ids=input_ids, @@ -917,7 +929,9 @@ def convert_single_example(ex_index, example, label_list, max_seq_length, segment_ids=segment_ids, label_id=label_id, is_real_example=True, - weight=example.weight) + weight=example.weight, + int_iden=example.int_iden) + return feature @@ -953,12 +967,14 @@ def file_based_convert_examples_to_features(examples, features["segment_ids"] = create_int_feature(feature.segment_ids) if label_type is not None and label_type == float: features["label_ids"] = create_float_feature([feature.label_id]) - else: + elif feature.label_id is not None: features["label_ids"] = create_int_feature([feature.label_id]) features["is_real_example"] = create_int_feature( [int(feature.is_real_example)]) if feature.weight is not None: features["weight"] = create_float_feature([feature.weight]) + if feature.int_iden is not None: + features["int_iden"] = create_int_feature([feature.int_iden]) tf_example = tf.train.Example(features=tf.train.Features(feature=features)) writer.write(tf_example.SerializeToString()) diff --git a/official/nlp/data/create_finetuning_data.py b/official/nlp/data/create_finetuning_data.py index 814ae4060..8fae97e12 100644 --- a/official/nlp/data/create_finetuning_data.py +++ b/official/nlp/data/create_finetuning_data.py @@ -27,6 +27,7 @@ from absl import flags import tensorflow as tf from official.nlp.bert import tokenization from official.nlp.data import classifier_data_lib +from official.nlp.data import sentence_retrieval_lib # word-piece tokenizer based squad_lib from official.nlp.data import squad_lib as squad_lib_wp # sentence-piece tokenizer based squad_lib @@ -36,7 +37,7 @@ FLAGS = flags.FLAGS flags.DEFINE_enum( "fine_tuning_task_type", "classification", - ["classification", "regression", "squad"], + ["classification", "regression", "squad", "retrieval"], "The name of the BERT fine tuning task for which data " "will be generated..") @@ -55,6 +56,9 @@ flags.DEFINE_enum("classification_task_name", "MNLI", "only and for XNLI is all languages combined. Same for " "PAWS-X.") +flags.DEFINE_enum("retrieval_task_name", "bucc", ["bucc", "tatoeba"], + "The name of sentence retrieval task for scoring") + # XNLI task specific flag. 
flags.DEFINE_string( "xnli_language", "en", @@ -246,6 +250,39 @@ def generate_squad_dataset(): FLAGS.max_query_length, FLAGS.doc_stride, FLAGS.version_2_with_negative) +def generate_retrieval_dataset(): + """Generate retrieval test and dev dataset and returns input meta data.""" + assert (FLAGS.input_data_dir and FLAGS.retrieval_task_name) + if FLAGS.tokenizer_impl == "word_piece": + tokenizer = tokenization.FullTokenizer( + vocab_file=FLAGS.vocab_file, do_lower_case=FLAGS.do_lower_case) + processor_text_fn = tokenization.convert_to_unicode + else: + assert FLAGS.tokenizer_impl == "sentence_piece" + tokenizer = tokenization.FullSentencePieceTokenizer(FLAGS.sp_model_file) + processor_text_fn = functools.partial( + tokenization.preprocess_text, lower=FLAGS.do_lower_case) + + processors = { + "bucc": sentence_retrieval_lib.BuccProcessor, + "tatoeba": sentence_retrieval_lib.TatoebaProcessor, + } + + task_name = FLAGS.retrieval_task_name.lower() + if task_name not in processors: + raise ValueError("Task not found: %s" % task_name) + + processor = processors[task_name](process_text_fn=processor_text_fn) + + return sentence_retrieval_lib.generate_sentence_retrevial_tf_record( + processor, + FLAGS.input_data_dir, + tokenizer, + FLAGS.eval_data_output_path, + FLAGS.test_data_output_path, + FLAGS.max_seq_length) + + def main(_): if FLAGS.tokenizer_impl == "word_piece": if not FLAGS.vocab_file: @@ -257,10 +294,15 @@ def main(_): raise ValueError( "FLAG sp_model_file for sentence-piece tokenizer is not specified.") + if FLAGS.fine_tuning_task_type != "retrieval": + flags.mark_flag_as_required("train_data_output_path") + if FLAGS.fine_tuning_task_type == "classification": input_meta_data = generate_classifier_dataset() elif FLAGS.fine_tuning_task_type == "regression": input_meta_data = generate_regression_dataset() + elif FLAGS.fine_tuning_task_type == "retrieval": + input_meta_data = generate_retrieval_dataset() else: input_meta_data = generate_squad_dataset() @@ -270,6 +312,5 @@ def main(_): if __name__ == "__main__": - flags.mark_flag_as_required("train_data_output_path") flags.mark_flag_as_required("meta_data_file_path") app.run(main) diff --git a/official/nlp/data/sentence_retrieval_lib.py b/official/nlp/data/sentence_retrieval_lib.py new file mode 100644 index 000000000..d8e83ae57 --- /dev/null +++ b/official/nlp/data/sentence_retrieval_lib.py @@ -0,0 +1,168 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
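The records consumed by `create_retrieval_dataset` above carry the usual BERT inputs plus an `int_iden` feature identifying each sentence for scoring. A minimal sketch of writing one such record and parsing it back (toy sequence length and token ids; the real pipeline uses `max_seq_length`-sized features):

```python
import tensorflow as tf

seq_length = 8  # toy value; real data uses max_seq_length

def int64_feature(values):
  return tf.train.Feature(int64_list=tf.train.Int64List(value=list(values)))

example = tf.train.Example(features=tf.train.Features(feature={
    "input_ids": int64_feature([101, 2023, 2003, 102, 0, 0, 0, 0]),
    "input_mask": int64_feature([1, 1, 1, 1, 0, 0, 0, 0]),
    "segment_ids": int64_feature([0] * seq_length),
    "int_iden": int64_feature([42]),  # integer id of the example in the corpus
}))
serialized = example.SerializeToString()

name_to_features = {
    "input_ids": tf.io.FixedLenFeature([seq_length], tf.int64),
    "input_mask": tf.io.FixedLenFeature([seq_length], tf.int64),
    "segment_ids": tf.io.FixedLenFeature([seq_length], tf.int64),
    "int_iden": tf.io.FixedLenFeature([1], tf.int64),
}
parsed = tf.io.parse_single_example(serialized, name_to_features)
print(parsed["int_iden"].numpy())  # [42]
```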
+# ============================================================================== +"""BERT library to process data for cross lingual sentence retrieval task.""" + +import os + +from absl import logging +from official.nlp.bert import tokenization +from official.nlp.data import classifier_data_lib + + +class BuccProcessor(classifier_data_lib.DataProcessor): + """Procssor for Xtreme BUCC data set.""" + supported_languages = ["de", "fr", "ru", "zh"] + + def __init__(self, + process_text_fn=tokenization.convert_to_unicode): + super(BuccProcessor, self).__init__(process_text_fn) + self.languages = BuccProcessor.supported_languages + + def get_dev_examples(self, data_dir, file_pattern): + return self._create_examples( + self._read_tsv(os.path.join(data_dir, file_pattern.format("dev"))), + "sample") + + def get_test_examples(self, data_dir, file_pattern): + return self._create_examples( + self._read_tsv(os.path.join(data_dir, file_pattern.format("test"))), + "test") + + @staticmethod + def get_processor_name(): + """See base class.""" + return "BUCC" + + def _create_examples(self, lines, set_type): + """Creates examples for the training and dev sets.""" + examples = [] + for (i, line) in enumerate(lines): + guid = "%s-%s" % (set_type, i) + int_iden = int(line[0].split("-")[1]) + text_a = self.process_text_fn(line[1]) + examples.append( + classifier_data_lib.InputExample( + guid=guid, text_a=text_a, int_iden=int_iden)) + return examples + + +class TatoebaProcessor(classifier_data_lib.DataProcessor): + """Procssor for Xtreme Tatoeba data set.""" + supported_languages = [ + "af", "ar", "bg", "bn", "de", "el", "es", "et", "eu", "fa", "fi", "fr", + "he", "hi", "hu", "id", "it", "ja", "jv", "ka", "kk", "ko", "ml", "mr", + "nl", "pt", "ru", "sw", "ta", "te", "th", "tl", "tr", "ur", "vi", "zh" + ] + + def __init__(self, + process_text_fn=tokenization.convert_to_unicode): + super(TatoebaProcessor, self).__init__(process_text_fn) + self.languages = TatoebaProcessor.supported_languages + + def get_test_examples(self, data_dir, file_path): + return self._create_examples( + self._read_tsv(os.path.join(data_dir, file_path)), "test") + + @staticmethod + def get_processor_name(): + """See base class.""" + return "TATOEBA" + + def _create_examples(self, lines, set_type): + """Creates examples for the training and dev sets.""" + examples = [] + for (i, line) in enumerate(lines): + guid = "%s-%s" % (set_type, i) + text_a = self.process_text_fn(line[0]) + examples.append( + classifier_data_lib.InputExample( + guid=guid, text_a=text_a, int_iden=i)) + return examples + + +def generate_sentence_retrevial_tf_record(processor, + data_dir, + tokenizer, + eval_data_output_path=None, + test_data_output_path=None, + max_seq_length=128): + """Generates the tf records for retrieval tasks. + + Args: + processor: Input processor object to be used for generating data. Subclass + of `DataProcessor`. + data_dir: Directory that contains train/eval data to process. Data files + should be in from. + tokenizer: The tokenizer to be applied on the data. + eval_data_output_path: Output to which processed tf record for evaluation + will be saved. + test_data_output_path: Output to which processed tf record for testing + will be saved. Must be a pattern template with {} if processor has + language specific test data. + max_seq_length: Maximum sequence length of the to be generated + training/eval data. + + Returns: + A dictionary containing input meta data. 
+ """ + assert eval_data_output_path or test_data_output_path + + if processor.get_processor_name() == "BUCC": + path_pattern = "{}-en.{{}}.{}" + + if processor.get_processor_name() == "TATOEBA": + path_pattern = "{}-en.{}" + + meta_data = { + "processor_type": processor.get_processor_name(), + "max_seq_length": max_seq_length, + "number_eval_data": {}, + "number_test_data": {}, + } + logging.info("Start to process %s task data", processor.get_processor_name()) + + for lang_a in processor.languages: + for lang_b in [lang_a, "en"]: + if eval_data_output_path: + eval_input_data_examples = processor.get_dev_examples( + data_dir, os.path.join(path_pattern.format(lang_a, lang_b))) + + num_eval_data = len(eval_input_data_examples) + logging.info("Processing %d dev examples of %s-en.%s", num_eval_data, + lang_a, lang_b) + output_file = os.path.join( + eval_data_output_path, + "{}-en-{}.{}.tfrecords".format(lang_a, lang_b, "dev")) + classifier_data_lib.file_based_convert_examples_to_features( + eval_input_data_examples, None, max_seq_length, tokenizer, + output_file, None) + meta_data["number_eval_data"][f"{lang_a}-en.{lang_b}"] = num_eval_data + + if test_data_output_path: + test_input_data_examples = processor.get_test_examples( + data_dir, os.path.join(path_pattern.format(lang_a, lang_b))) + + num_test_data = len(test_input_data_examples) + logging.info("Processing %d test examples of %s-en.%s", num_test_data, + lang_a, lang_b) + output_file = os.path.join( + test_data_output_path, + "{}-en-{}.{}.tfrecords".format(lang_a, lang_b, "test")) + classifier_data_lib.file_based_convert_examples_to_features( + test_input_data_examples, None, max_seq_length, tokenizer, + output_file, None) + meta_data["number_test_data"][f"{lang_a}-en.{lang_b}"] = num_test_data + + return meta_data -- GitLab From ecf13b0e5f0d47d60b0eedb15e7944327969a1fb Mon Sep 17 00:00:00 2001 From: "A. 
Unique TensorFlower" Date: Tue, 23 Jun 2020 11:38:19 -0700 Subject: [PATCH 50/79] Internal change PiperOrigin-RevId: 317908656 --- official/nlp/transformer/compute_bleu.py | 4 ++++ official/nlp/transformer/compute_bleu_test.py | 8 ++++++++ 2 files changed, 12 insertions(+) diff --git a/official/nlp/transformer/compute_bleu.py b/official/nlp/transformer/compute_bleu.py index 92d54c30e..f7dfd542b 100644 --- a/official/nlp/transformer/compute_bleu.py +++ b/official/nlp/transformer/compute_bleu.py @@ -92,7 +92,11 @@ def bleu_wrapper(ref_filename, hyp_filename, case_sensitive=False): tf.io.gfile.GFile(ref_filename).read()).strip().splitlines() hyp_lines = tokenizer.native_to_unicode( tf.io.gfile.GFile(hyp_filename).read()).strip().splitlines() + return bleu_on_list(ref_lines, hyp_lines, case_sensitive) + +def bleu_on_list(ref_lines, hyp_lines, case_sensitive=False): + """Compute BLEU for two list of strings (reference and hypothesis).""" if len(ref_lines) != len(hyp_lines): raise ValueError( "Reference and translation files have different number of " diff --git a/official/nlp/transformer/compute_bleu_test.py b/official/nlp/transformer/compute_bleu_test.py index 6c578e369..5b370947d 100644 --- a/official/nlp/transformer/compute_bleu_test.py +++ b/official/nlp/transformer/compute_bleu_test.py @@ -59,6 +59,14 @@ class ComputeBleuTest(tf.test.TestCase): tokenized = compute_bleu.bleu_tokenize(s) self.assertEqual(["Test0", ",", "1", "two", ",", "3"], tokenized) + def test_bleu_list(self): + ref = ["test 1 two 3", "more tests!"] + hyp = ["test 1 two 3", "More tests!"] + uncased_score = compute_bleu.bleu_on_list(ref, hyp, False) + cased_score = compute_bleu.bleu_on_list(ref, hyp, True) + self.assertEqual(uncased_score, 100) + self.assertLess(cased_score, 100) + if __name__ == "__main__": tf.test.main() -- GitLab From a09021be55d50d2bb534be1402747822d8e8b82a Mon Sep 17 00:00:00 2001 From: Jaeyoun Kim Date: Tue, 23 Jun 2020 13:19:48 -0700 Subject: [PATCH 51/79] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 5b52e4a5c..d155607ae 100644 --- a/README.md +++ b/README.md @@ -26,7 +26,7 @@ can take full advantage of TensorFlow for their research and product development | Date | Milestone | |------|-----------| -| July 7, 2020 | [![GitHub milestone](https://img.shields.io/github/milestones/progress/tensorflow/models/1)](https://github.com/tensorflow/models/milestone/1) | +| July 8, 2020 | [![GitHub milestone](https://img.shields.io/github/milestones/progress/tensorflow/models/1)](https://github.com/tensorflow/models/milestone/1) | ## Contributions -- GitLab From 6d0cf37254de1dee7e2ddea0dee22858a1bb7502 Mon Sep 17 00:00:00 2001 From: "A. Unique TensorFlower" Date: Tue, 23 Jun 2020 14:52:42 -0700 Subject: [PATCH 52/79] Add support for WNLI data preprocessor, and update classifier_data_lib_test to run through dev/test sets (just RTE/WNLI for now). 
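The WNLI processor added in this patch follows the other GLUE processors: every non-header TSV row becomes an `InputExample` whose two sentences come from columns 1 and 2 and whose label comes from column 3, with a dummy "0" label for the test split. A rough standalone stand-in for that row-to-example mapping, with an invented sample row:

```python
# Stand-in for WnliProcessor._create_examples in classifier_data_lib.py;
# it only mirrors the column layout, not the real InputExample class.
def wnli_row_to_example(row, index, set_type):
  guid = "%s-%s" % (set_type, index)
  text_a, text_b = row[1], row[2]
  label = "0" if set_type == "test" else row[3]
  return {"guid": guid, "text_a": text_a, "text_b": text_b, "label": label}

row = ["0",
       "The trophy doesn't fit in the suitcase because it is too large.",
       "The trophy is too large.",
       "1"]
print(wnli_row_to_example(row, 1, "train"))
# {'guid': 'train-1', 'text_a': '...', 'text_b': '...', 'label': '1'}
```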
PiperOrigin-RevId: 317943685 --- official/nlp/data/classifier_data_lib.py | 75 +++++++++++++++------ official/nlp/data/create_finetuning_data.py | 1 + 2 files changed, 57 insertions(+), 19 deletions(-) diff --git a/official/nlp/data/classifier_data_lib.py b/official/nlp/data/classifier_data_lib.py index 67c47d787..bf6feae31 100644 --- a/official/nlp/data/classifier_data_lib.py +++ b/official/nlp/data/classifier_data_lib.py @@ -611,13 +611,11 @@ class RteProcessor(DataProcessor): if i == 0: continue guid = "%s-%s" % (set_type, i) + text_a = tokenization.convert_to_unicode(line[1]) + text_b = tokenization.convert_to_unicode(line[2]) if set_type == "test": - text_a = tokenization.convert_to_unicode(line[1]) - text_b = tokenization.convert_to_unicode(line[2]) label = "entailment" else: - text_a = tokenization.convert_to_unicode(line[1]) - text_b = tokenization.convert_to_unicode(line[2]) label = tokenization.convert_to_unicode(line[3]) examples.append( InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) @@ -836,6 +834,51 @@ class TfdsProcessor(DataProcessor): return examples +class WnliProcessor(DataProcessor): + """Processor for the WNLI data set (GLUE version).""" + + def get_train_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "train.tsv")), "train") + + def get_dev_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev") + + def get_test_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "test.tsv")), "test") + + def get_labels(self): + """See base class.""" + return ["0", "1"] + + @staticmethod + def get_processor_name(): + """See base class.""" + return "WNLI" + + def _create_examples(self, lines, set_type): + """Creates examples for the training and dev sets.""" + examples = [] + for i, line in enumerate(lines): + if i == 0: + continue + guid = "%s-%s" % (set_type, i) + text_a = tokenization.convert_to_unicode(line[1]) + text_b = tokenization.convert_to_unicode(line[2]) + if set_type == "test": + label = "0" + else: + label = tokenization.convert_to_unicode(line[3]) + examples.append( + InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) + return examples + + def convert_single_example(ex_index, example, label_list, max_seq_length, tokenizer): """Converts a single `InputExample` into a single `InputFeatures`.""" @@ -1010,8 +1053,7 @@ def generate_tf_record_from_data_file(processor, Arguments: processor: Input processor object to be used for generating data. Subclass of `DataProcessor`. - data_dir: Directory that contains train/eval data to process. Data files - should be in from "dev.tsv", "test.tsv", or "train.tsv". + data_dir: Directory that contains train/eval/test data to process. tokenizer: The tokenizer to be applied on the data. train_data_output_path: Output to which processed tf record for training will be saved. 
@@ -1047,6 +1089,12 @@ def generate_tf_record_from_data_file(processor, tokenizer, eval_data_output_path, label_type) + meta_data = { + "processor_type": processor.get_processor_name(), + "train_data_size": num_training_data, + "max_seq_length": max_seq_length, + } + if test_data_output_path: test_input_data_examples = processor.get_test_examples(data_dir) if isinstance(test_input_data_examples, dict): @@ -1054,17 +1102,14 @@ def generate_tf_record_from_data_file(processor, file_based_convert_examples_to_features( examples, label_list, max_seq_length, tokenizer, test_data_output_path.format(language), label_type) + meta_data["test_{}_data_size".format(language)] = len(examples) else: file_based_convert_examples_to_features(test_input_data_examples, label_list, max_seq_length, tokenizer, test_data_output_path, label_type) + meta_data["test_data_size"] = len(test_input_data_examples) - meta_data = { - "processor_type": processor.get_processor_name(), - "train_data_size": num_training_data, - "max_seq_length": max_seq_length, - } if is_regression: meta_data["task_type"] = "bert_regression" meta_data["label_type"] = {int: "int", float: "float"}[label_type] @@ -1077,12 +1122,4 @@ def generate_tf_record_from_data_file(processor, if eval_data_output_path: meta_data["eval_data_size"] = len(eval_input_data_examples) - if test_data_output_path: - test_input_data_examples = processor.get_test_examples(data_dir) - if isinstance(test_input_data_examples, dict): - for language, examples in test_input_data_examples.items(): - meta_data["test_{}_data_size".format(language)] = len(examples) - else: - meta_data["test_data_size"] = len(test_input_data_examples) - return meta_data diff --git a/official/nlp/data/create_finetuning_data.py b/official/nlp/data/create_finetuning_data.py index 8fae97e12..7a74745fa 100644 --- a/official/nlp/data/create_finetuning_data.py +++ b/official/nlp/data/create_finetuning_data.py @@ -186,6 +186,7 @@ def generate_classifier_dataset(): "paws-x": functools.partial(classifier_data_lib.PawsxProcessor, language=FLAGS.pawsx_language), + "wnli": classifier_data_lib.WnliProcessor, "xtreme-xnli": functools.partial(classifier_data_lib.XtremeXnliProcessor), "xtreme-paws-x": -- GitLab From 3ed24a682d17d04ef46549726c17474ec32308fb Mon Sep 17 00:00:00 2001 From: Hongkun Yu Date: Tue, 23 Jun 2020 16:15:33 -0700 Subject: [PATCH 53/79] Internal change PiperOrigin-RevId: 317958778 --- official/modeling/hyperparams/config_definitions.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/official/modeling/hyperparams/config_definitions.py b/official/modeling/hyperparams/config_definitions.py index 78180cd8a..292094653 100644 --- a/official/modeling/hyperparams/config_definitions.py +++ b/official/modeling/hyperparams/config_definitions.py @@ -178,14 +178,17 @@ class TrainerConfig(base_config.Config): checkpoints, if set to None, continuous eval will wait indefinetely. 
""" optimizer_config: OptimizationConfig = OptimizationConfig() - train_tf_while_loop: bool = True - train_tf_function: bool = True - eval_tf_function: bool = True + train_steps: int = 0 + validation_steps: Optional[int] = None + validation_interval: int = 100 steps_per_loop: int = 1000 summary_interval: int = 1000 checkpoint_interval: int = 1000 max_to_keep: int = 5 continuous_eval_timeout: Optional[int] = None + train_tf_while_loop: bool = True + train_tf_function: bool = True + eval_tf_function: bool = True @dataclasses.dataclass @@ -201,9 +204,6 @@ class ExperimentConfig(base_config.Config): task: TaskConfig = TaskConfig() trainer: TrainerConfig = TrainerConfig() runtime: RuntimeConfig = RuntimeConfig() - train_steps: int = 0 - validation_steps: Optional[int] = None - validation_interval: int = 100 _REGISTERED_CONFIGS = {} -- GitLab From 037abae4f200220f604f4475ebf9fa781a7bc30c Mon Sep 17 00:00:00 2001 From: Pengchong Jin Date: Tue, 23 Jun 2020 16:35:57 -0700 Subject: [PATCH 54/79] Internal change PiperOrigin-RevId: 317962481 --- official/modeling/hyperparams/config_definitions.py | 2 +- official/nlp/tasks/masked_lm.py | 4 ++-- official/nlp/tasks/masked_lm_test.py | 2 +- official/nlp/tasks/question_answering.py | 6 +++--- official/nlp/tasks/question_answering_test.py | 6 +++--- official/nlp/tasks/sentence_prediction.py | 6 +++--- official/nlp/tasks/sentence_prediction_test.py | 10 +++++----- official/nlp/tasks/tagging.py | 8 ++++---- official/nlp/tasks/tagging_test.py | 6 +++--- 9 files changed, 25 insertions(+), 25 deletions(-) diff --git a/official/modeling/hyperparams/config_definitions.py b/official/modeling/hyperparams/config_definitions.py index 292094653..34e9818d0 100644 --- a/official/modeling/hyperparams/config_definitions.py +++ b/official/modeling/hyperparams/config_definitions.py @@ -193,7 +193,7 @@ class TrainerConfig(base_config.Config): @dataclasses.dataclass class TaskConfig(base_config.Config): - network: base_config.Config = None + model: base_config.Config = None train_data: DataConfig = DataConfig() validation_data: DataConfig = DataConfig() diff --git a/official/nlp/tasks/masked_lm.py b/official/nlp/tasks/masked_lm.py index 4d392ad11..bb5398458 100644 --- a/official/nlp/tasks/masked_lm.py +++ b/official/nlp/tasks/masked_lm.py @@ -27,7 +27,7 @@ from official.nlp.modeling import losses as loss_lib @dataclasses.dataclass class MaskedLMConfig(cfg.TaskConfig): """The model config.""" - network: bert.BertPretrainerConfig = bert.BertPretrainerConfig(cls_heads=[ + model: bert.BertPretrainerConfig = bert.BertPretrainerConfig(cls_heads=[ bert.ClsHeadConfig( inner_dim=768, num_classes=2, dropout_rate=0.1, name='next_sentence') ]) @@ -40,7 +40,7 @@ class MaskedLMTask(base_task.Task): """Mock task object for testing.""" def build_model(self): - return bert.instantiate_bertpretrainer_from_cfg(self.task_config.network) + return bert.instantiate_bertpretrainer_from_cfg(self.task_config.model) def build_losses(self, labels, diff --git a/official/nlp/tasks/masked_lm_test.py b/official/nlp/tasks/masked_lm_test.py index 0124165ed..15b7e1be4 100644 --- a/official/nlp/tasks/masked_lm_test.py +++ b/official/nlp/tasks/masked_lm_test.py @@ -26,7 +26,7 @@ class MLMTaskTest(tf.test.TestCase): def test_task(self): config = masked_lm.MaskedLMConfig( - network=bert.BertPretrainerConfig( + model=bert.BertPretrainerConfig( encoders.TransformerEncoderConfig(vocab_size=30522, num_layers=1), num_masked_tokens=20, cls_heads=[ diff --git a/official/nlp/tasks/question_answering.py 
b/official/nlp/tasks/question_answering.py index 7b3cb7f4e..aa066cc02 100644 --- a/official/nlp/tasks/question_answering.py +++ b/official/nlp/tasks/question_answering.py @@ -33,7 +33,7 @@ class QuestionAnsweringConfig(cfg.TaskConfig): # At most one of `init_checkpoint` and `hub_module_url` can be specified. init_checkpoint: str = '' hub_module_url: str = '' - network: encoders.TransformerEncoderConfig = ( + model: encoders.TransformerEncoderConfig = ( encoders.TransformerEncoderConfig()) train_data: cfg.DataConfig = cfg.DataConfig() validation_data: cfg.DataConfig = cfg.DataConfig() @@ -61,12 +61,12 @@ class QuestionAnsweringTask(base_task.Task): encoder_network = utils.get_encoder_from_hub(self._hub_module) else: encoder_network = encoders.instantiate_encoder_from_cfg( - self.task_config.network) + self.task_config.model) return models.BertSpanLabeler( network=encoder_network, initializer=tf.keras.initializers.TruncatedNormal( - stddev=self.task_config.network.initializer_range)) + stddev=self.task_config.model.initializer_range)) def build_losses(self, labels, model_outputs, aux_losses=None) -> tf.Tensor: start_positions = labels['start_positions'] diff --git a/official/nlp/tasks/question_answering_test.py b/official/nlp/tasks/question_answering_test.py index 8e0f3f10b..21d13a030 100644 --- a/official/nlp/tasks/question_answering_test.py +++ b/official/nlp/tasks/question_answering_test.py @@ -64,7 +64,7 @@ class QuestionAnsweringTaskTest(tf.test.TestCase): config = question_answering.QuestionAnsweringConfig( init_checkpoint=saved_path, - network=self._encoder_config, + model=self._encoder_config, train_data=self._train_data_config) task = question_answering.QuestionAnsweringTask(config) model = task.build_model() @@ -79,7 +79,7 @@ class QuestionAnsweringTaskTest(tf.test.TestCase): def test_task_with_fit(self): config = question_answering.QuestionAnsweringConfig( - network=self._encoder_config, + model=self._encoder_config, train_data=self._train_data_config) task = question_answering.QuestionAnsweringTask(config) model = task.build_model() @@ -121,7 +121,7 @@ class QuestionAnsweringTaskTest(tf.test.TestCase): hub_module_url = self._export_bert_tfhub() config = question_answering.QuestionAnsweringConfig( hub_module_url=hub_module_url, - network=self._encoder_config, + model=self._encoder_config, train_data=self._train_data_config) self._run_task(config) diff --git a/official/nlp/tasks/sentence_prediction.py b/official/nlp/tasks/sentence_prediction.py index b2eb0bf47..b7c45dfe6 100644 --- a/official/nlp/tasks/sentence_prediction.py +++ b/official/nlp/tasks/sentence_prediction.py @@ -38,7 +38,7 @@ class SentencePredictionConfig(cfg.TaskConfig): init_checkpoint: str = '' hub_module_url: str = '' metric_type: str = 'accuracy' - network: bert.BertPretrainerConfig = bert.BertPretrainerConfig( + model: bert.BertPretrainerConfig = bert.BertPretrainerConfig( num_masked_tokens=0, # No masked language modeling head. 
cls_heads=[ bert.ClsHeadConfig( @@ -70,9 +70,9 @@ class SentencePredictionTask(base_task.Task): if self._hub_module: encoder_from_hub = utils.get_encoder_from_hub(self._hub_module) return bert.instantiate_bertpretrainer_from_cfg( - self.task_config.network, encoder_network=encoder_from_hub) + self.task_config.model, encoder_network=encoder_from_hub) else: - return bert.instantiate_bertpretrainer_from_cfg(self.task_config.network) + return bert.instantiate_bertpretrainer_from_cfg(self.task_config.model) def build_losses(self, labels, model_outputs, aux_losses=None) -> tf.Tensor: loss = loss_lib.weighted_sparse_categorical_crossentropy_loss( diff --git a/official/nlp/tasks/sentence_prediction_test.py b/official/nlp/tasks/sentence_prediction_test.py index 09419f54c..b494f7dc7 100644 --- a/official/nlp/tasks/sentence_prediction_test.py +++ b/official/nlp/tasks/sentence_prediction_test.py @@ -34,7 +34,7 @@ class SentencePredictionTaskTest(tf.test.TestCase, parameterized.TestCase): self._train_data_config = bert.SentencePredictionDataConfig( input_path="dummy", seq_length=128, global_batch_size=1) - def get_network_config(self, num_classes): + def get_model_config(self, num_classes): return bert.BertPretrainerConfig( encoder=encoders.TransformerEncoderConfig( vocab_size=30522, num_layers=1), @@ -63,7 +63,7 @@ class SentencePredictionTaskTest(tf.test.TestCase, parameterized.TestCase): def test_task(self): config = sentence_prediction.SentencePredictionConfig( init_checkpoint=self.get_temp_dir(), - network=self.get_network_config(2), + model=self.get_model_config(2), train_data=self._train_data_config) task = sentence_prediction.SentencePredictionTask(config) model = task.build_model() @@ -96,7 +96,7 @@ class SentencePredictionTaskTest(tf.test.TestCase, parameterized.TestCase): config = sentence_prediction.SentencePredictionConfig( metric_type=metric_type, init_checkpoint=self.get_temp_dir(), - network=self.get_network_config(num_classes), + model=self.get_model_config(num_classes), train_data=self._train_data_config) task = sentence_prediction.SentencePredictionTask(config) model = task.build_model() @@ -115,7 +115,7 @@ class SentencePredictionTaskTest(tf.test.TestCase, parameterized.TestCase): def test_task_with_fit(self): config = sentence_prediction.SentencePredictionConfig( - network=self.get_network_config(2), train_data=self._train_data_config) + model=self.get_model_config(2), train_data=self._train_data_config) task = sentence_prediction.SentencePredictionTask(config) model = task.build_model() model = task.compile_model( @@ -154,7 +154,7 @@ class SentencePredictionTaskTest(tf.test.TestCase, parameterized.TestCase): hub_module_url = self._export_bert_tfhub() config = sentence_prediction.SentencePredictionConfig( hub_module_url=hub_module_url, - network=self.get_network_config(2), + model=self.get_model_config(2), train_data=self._train_data_config) self._run_task(config) diff --git a/official/nlp/tasks/tagging.py b/official/nlp/tasks/tagging.py index a1f20b136..d319301dc 100644 --- a/official/nlp/tasks/tagging.py +++ b/official/nlp/tasks/tagging.py @@ -33,7 +33,7 @@ class TaggingConfig(cfg.TaskConfig): # At most one of `init_checkpoint` and `hub_module_url` can be specified. init_checkpoint: str = '' hub_module_url: str = '' - network: encoders.TransformerEncoderConfig = ( + model: encoders.TransformerEncoderConfig = ( encoders.TransformerEncoderConfig()) num_classes: int = 0 # The ignored label id will not contribute to loss. 
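The rename is mechanical but touches every call site that builds a task from a config. As a rough sketch (module paths are assumed from the files touched in this patch, and the encoder settings are illustrative only), a tagging task is now configured through the `model` field:

```python
# Hypothetical snippet: the TaskConfig field formerly named `network` is now
# `model`, so task construction reads as follows.
from official.nlp.configs import encoders
from official.nlp.tasks import tagging

config = tagging.TaggingConfig(
    model=encoders.TransformerEncoderConfig(vocab_size=30522, num_layers=1),
    num_classes=3)
task = tagging.TaggingTask(config)
keras_model = task.build_model()  # Encoder is instantiated from config.model.
```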
@@ -67,14 +67,14 @@ class TaggingTask(base_task.Task): encoder_network = utils.get_encoder_from_hub(self._hub_module) else: encoder_network = encoders.instantiate_encoder_from_cfg( - self.task_config.network) + self.task_config.model) return models.BertTokenClassifier( network=encoder_network, num_classes=self.task_config.num_classes, initializer=tf.keras.initializers.TruncatedNormal( - stddev=self.task_config.network.initializer_range), - dropout_rate=self.task_config.network.dropout_rate, + stddev=self.task_config.model.initializer_range), + dropout_rate=self.task_config.model.dropout_rate, output='logits') def build_losses(self, labels, model_outputs, aux_losses=None) -> tf.Tensor: diff --git a/official/nlp/tasks/tagging_test.py b/official/nlp/tasks/tagging_test.py index 6707a50a8..1eaa398f8 100644 --- a/official/nlp/tasks/tagging_test.py +++ b/official/nlp/tasks/tagging_test.py @@ -56,7 +56,7 @@ class TaggingTest(tf.test.TestCase): config = tagging.TaggingConfig( init_checkpoint=saved_path, - network=self._encoder_config, + model=self._encoder_config, train_data=self._train_data_config, num_classes=3) task = tagging.TaggingTask(config) @@ -72,7 +72,7 @@ class TaggingTest(tf.test.TestCase): def test_task_with_fit(self): config = tagging.TaggingConfig( - network=self._encoder_config, + model=self._encoder_config, train_data=self._train_data_config, num_classes=3) @@ -115,7 +115,7 @@ class TaggingTest(tf.test.TestCase): hub_module_url = self._export_bert_tfhub() config = tagging.TaggingConfig( hub_module_url=hub_module_url, - network=self._encoder_config, + model=self._encoder_config, num_classes=4, train_data=self._train_data_config) self._run_task(config) -- GitLab From 671d542498b1e0796bd80e31773578b60001ea25 Mon Sep 17 00:00:00 2001 From: Jaeyoun Kim Date: Wed, 24 Jun 2020 13:57:11 -0700 Subject: [PATCH 55/79] Update README_TEMPLATE.md Add a TF Hub badge --- .github/README_TEMPLATE.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/README_TEMPLATE.md b/.github/README_TEMPLATE.md index 45179d0ae..43dba40f5 100644 --- a/.github/README_TEMPLATE.md +++ b/.github/README_TEMPLATE.md @@ -54,6 +54,8 @@ pip install -r requirements.txt ## Results +[![TensorFlow Hub](https://img.shields.io/badge/TF%20Hub-Models-FF6F00?logo=tensorflow)](https://tfhub.dev/...) + > :memo: Provide a table with results. (e.g., accuracy, latency) > > * Provide links to the pre-trained models (checkpoint, SavedModel files). -- GitLab From e0dade52dc470e8696556760516b65a5864e1f6d Mon Sep 17 00:00:00 2001 From: vivek rathod Date: Wed, 24 Jun 2020 14:18:02 -0700 Subject: [PATCH 56/79] Merged commit includes the following changes: (#8728) 318106429 by derekjchow: Add Dockerfiles for TF-OD API. 1.15 and 2.2 supported currently. -- 318083650 by rathodv: Internal Change. -- 317893148 by Zhichao Lu: Fix mapping from proto fields to parameters of the data augmentation functions for horizontal flip, vertical flip and 90 degree rotations. -- 317753117 by Zhichao Lu: Adds keras hyperparam option to force use_bias to True, even when using batch norm. -- 317613986 by Zhichao Lu: Improves Keypoints support for data augmentation by means of 90 degree rotation adding an option to permute keypoints. Unify the interfaces among flip and rotation ops for data augmentation by exposing additional properties to the user. 
-- 317136881 by Zhichao Lu: Clarifying documentation -- 317097141 by Zhichao Lu: Adding Context R-CNN Release to TFODAPI ReadMe -- 316999744 by Zhichao Lu: Add import tensorflow.compat.v2 as tf2 in the model_lib to ensure tf1 compatibility. -- 316964482 by Zhichao Lu: adding a note about a config change needed for exporting detection features -- 316944293 by Zhichao Lu: Adding install instructions for apache beam -- 316917592 by lzc: Internal change. -- PiperOrigin-RevId: 318106429 Co-authored-by: Zhichao Lu --- .../builders/hyperparams_builder.py | 17 ++++-- .../builders/hyperparams_builder_test.py | 61 +++++++++++++++++++ .../builders/preprocessor_builder.py | 10 ++- .../builders/preprocessor_builder_test.py | 19 ++++-- .../object_detection/core/keypoint_ops.py | 50 +++++++++------ .../core/keypoint_ops_test.py | 48 +++++++++++++++ .../object_detection/core/preprocessor.py | 19 ++++-- .../core/preprocessor_test.py | 10 ++- .../dockerfiles/1.15/Dockerfile | 44 +++++++++++++ .../dockerfiles/1.15/README.md | 11 ++++ .../dockerfiles/2.2/Dockerfile | 44 +++++++++++++ .../dockerfiles/2.2/README.md | 11 ++++ .../object_detection/model_lib_tf2_test.py | 52 +++++++++------- research/object_detection/model_lib_v2.py | 52 ++-------------- research/object_detection/model_main_tf2.py | 9 +-- .../object_detection/protos/hyperparams.proto | 6 ++ .../protos/preprocessor.proto | 30 ++++++--- .../object_detection/utils/config_util.py | 28 +++++++++ 18 files changed, 399 insertions(+), 122 deletions(-) create mode 100644 research/object_detection/dockerfiles/1.15/Dockerfile create mode 100644 research/object_detection/dockerfiles/1.15/README.md create mode 100644 research/object_detection/dockerfiles/2.2/Dockerfile create mode 100644 research/object_detection/dockerfiles/2.2/README.md diff --git a/research/object_detection/builders/hyperparams_builder.py b/research/object_detection/builders/hyperparams_builder.py index f34e1112a..90aef43ac 100644 --- a/research/object_detection/builders/hyperparams_builder.py +++ b/research/object_detection/builders/hyperparams_builder.py @@ -64,6 +64,7 @@ class KerasLayerHyperparams(object): self._batch_norm_params = _build_keras_batch_norm_params( hyperparams_config.batch_norm) + self._force_use_bias = hyperparams_config.force_use_bias self._activation_fn = _build_activation_fn(hyperparams_config.activation) # TODO(kaftan): Unclear if these kwargs apply to separable & depthwise conv # (Those might use depthwise_* instead of kernel_*) @@ -80,6 +81,13 @@ class KerasLayerHyperparams(object): def use_batch_norm(self): return self._batch_norm_params is not None + def force_use_bias(self): + return self._force_use_bias + + def use_bias(self): + return (self._force_use_bias or not + (self.use_batch_norm() and self.batch_norm_params()['center'])) + def batch_norm_params(self, **overrides): """Returns a dict containing batchnorm layer construction hyperparameters. 
@@ -168,10 +176,7 @@ class KerasLayerHyperparams(object): new_params['activation'] = None if include_activation: new_params['activation'] = self._activation_fn - if self.use_batch_norm() and self.batch_norm_params()['center']: - new_params['use_bias'] = False - else: - new_params['use_bias'] = True + new_params['use_bias'] = self.use_bias() new_params.update(**overrides) return new_params @@ -210,6 +215,10 @@ def build(hyperparams_config, is_training): raise ValueError('hyperparams_config not of type ' 'hyperparams_pb.Hyperparams.') + if hyperparams_config.force_use_bias: + raise ValueError('Hyperparams force_use_bias only supported by ' + 'KerasLayerHyperparams.') + normalizer_fn = None batch_norm_params = None if hyperparams_config.HasField('batch_norm'): diff --git a/research/object_detection/builders/hyperparams_builder_test.py b/research/object_detection/builders/hyperparams_builder_test.py index 2c6fcd5af..e48ac23bc 100644 --- a/research/object_detection/builders/hyperparams_builder_test.py +++ b/research/object_detection/builders/hyperparams_builder_test.py @@ -667,6 +667,67 @@ class KerasHyperparamsBuilderTest(tf.test.TestCase): self.assertIsInstance(identity_layer, tf.keras.layers.Lambda) + def test_do_not_use_bias_if_batch_norm_center_keras(self): + conv_hyperparams_text_proto = """ + regularizer { + l2_regularizer { + } + } + initializer { + truncated_normal_initializer { + } + } + batch_norm { + decay: 0.7 + center: true + scale: true + epsilon: 0.03 + train: true + } + """ + conv_hyperparams_proto = hyperparams_pb2.Hyperparams() + text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) + keras_config = hyperparams_builder.KerasLayerHyperparams( + conv_hyperparams_proto) + + self.assertTrue(keras_config.use_batch_norm()) + batch_norm_params = keras_config.batch_norm_params() + self.assertTrue(batch_norm_params['center']) + self.assertTrue(batch_norm_params['scale']) + hyperparams = keras_config.params() + self.assertFalse(hyperparams['use_bias']) + + def test_force_use_bias_if_batch_norm_center_keras(self): + conv_hyperparams_text_proto = """ + regularizer { + l2_regularizer { + } + } + initializer { + truncated_normal_initializer { + } + } + batch_norm { + decay: 0.7 + center: true + scale: true + epsilon: 0.03 + train: true + } + force_use_bias: true + """ + conv_hyperparams_proto = hyperparams_pb2.Hyperparams() + text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto) + keras_config = hyperparams_builder.KerasLayerHyperparams( + conv_hyperparams_proto) + + self.assertTrue(keras_config.use_batch_norm()) + batch_norm_params = keras_config.batch_norm_params() + self.assertTrue(batch_norm_params['center']) + self.assertTrue(batch_norm_params['scale']) + hyperparams = keras_config.params() + self.assertTrue(hyperparams['use_bias']) + def test_use_none_activation_keras(self): conv_hyperparams_text_proto = """ regularizer { diff --git a/research/object_detection/builders/preprocessor_builder.py b/research/object_detection/builders/preprocessor_builder.py index aa6a6bc96..fe59039b1 100644 --- a/research/object_detection/builders/preprocessor_builder.py +++ b/research/object_detection/builders/preprocessor_builder.py @@ -151,6 +151,7 @@ def build(preprocessor_step_config): { 'keypoint_flip_permutation': tuple( config.keypoint_flip_permutation) or None, + 'probability': config.probability or None, }) if step_type == 'random_vertical_flip': @@ -159,10 +160,17 @@ def build(preprocessor_step_config): { 'keypoint_flip_permutation': tuple( 
config.keypoint_flip_permutation) or None, + 'probability': config.probability or None, }) if step_type == 'random_rotation90': - return (preprocessor.random_rotation90, {}) + config = preprocessor_step_config.random_rotation90 + return (preprocessor.random_rotation90, + { + 'keypoint_rot_permutation': tuple( + config.keypoint_rot_permutation) or None, + 'probability': config.probability or None, + }) if step_type == 'random_crop_image': config = preprocessor_step_config.random_crop_image diff --git a/research/object_detection/builders/preprocessor_builder_test.py b/research/object_detection/builders/preprocessor_builder_test.py index 4c283238c..9e90344d0 100644 --- a/research/object_detection/builders/preprocessor_builder_test.py +++ b/research/object_detection/builders/preprocessor_builder_test.py @@ -65,13 +65,15 @@ class PreprocessorBuilderTest(tf.test.TestCase): keypoint_flip_permutation: 3 keypoint_flip_permutation: 5 keypoint_flip_permutation: 4 + probability: 0.5 } """ preprocessor_proto = preprocessor_pb2.PreprocessingStep() text_format.Merge(preprocessor_text_proto, preprocessor_proto) function, args = preprocessor_builder.build(preprocessor_proto) self.assertEqual(function, preprocessor.random_horizontal_flip) - self.assertEqual(args, {'keypoint_flip_permutation': (1, 0, 2, 3, 5, 4)}) + self.assertEqual(args, {'keypoint_flip_permutation': (1, 0, 2, 3, 5, 4), + 'probability': 0.5}) def test_build_random_vertical_flip(self): preprocessor_text_proto = """ @@ -82,23 +84,32 @@ class PreprocessorBuilderTest(tf.test.TestCase): keypoint_flip_permutation: 3 keypoint_flip_permutation: 5 keypoint_flip_permutation: 4 + probability: 0.5 } """ preprocessor_proto = preprocessor_pb2.PreprocessingStep() text_format.Merge(preprocessor_text_proto, preprocessor_proto) function, args = preprocessor_builder.build(preprocessor_proto) self.assertEqual(function, preprocessor.random_vertical_flip) - self.assertEqual(args, {'keypoint_flip_permutation': (1, 0, 2, 3, 5, 4)}) + self.assertEqual(args, {'keypoint_flip_permutation': (1, 0, 2, 3, 5, 4), + 'probability': 0.5}) def test_build_random_rotation90(self): preprocessor_text_proto = """ - random_rotation90 {} + random_rotation90 { + keypoint_rot_permutation: 3 + keypoint_rot_permutation: 0 + keypoint_rot_permutation: 1 + keypoint_rot_permutation: 2 + probability: 0.5 + } """ preprocessor_proto = preprocessor_pb2.PreprocessingStep() text_format.Merge(preprocessor_text_proto, preprocessor_proto) function, args = preprocessor_builder.build(preprocessor_proto) self.assertEqual(function, preprocessor.random_rotation90) - self.assertEqual(args, {}) + self.assertEqual(args, {'keypoint_rot_permutation': (3, 0, 1, 2), + 'probability': 0.5}) def test_build_random_pixel_value_scale(self): preprocessor_text_proto = """ diff --git a/research/object_detection/core/keypoint_ops.py b/research/object_detection/core/keypoint_ops.py index e321783d9..1b0c4ccfe 100644 --- a/research/object_detection/core/keypoint_ops.py +++ b/research/object_detection/core/keypoint_ops.py @@ -217,7 +217,7 @@ def to_absolute_coordinates(keypoints, height, width, return scale(keypoints, height, width) -def flip_horizontal(keypoints, flip_point, flip_permutation, scope=None): +def flip_horizontal(keypoints, flip_point, flip_permutation=None, scope=None): """Flips the keypoints horizontally around the flip_point. 
This operation flips the x coordinate for each keypoint around the flip_point @@ -227,13 +227,14 @@ def flip_horizontal(keypoints, flip_point, flip_permutation, scope=None): keypoints: a tensor of shape [num_instances, num_keypoints, 2] flip_point: (float) scalar tensor representing the x coordinate to flip the keypoints around. - flip_permutation: rank 1 int32 tensor containing the keypoint flip - permutation. This specifies the mapping from original keypoint indices - to the flipped keypoint indices. This is used primarily for keypoints - that are not reflection invariant. E.g. Suppose there are 3 keypoints - representing ['head', 'right_eye', 'left_eye'], then a logical choice for - flip_permutation might be [0, 2, 1] since we want to swap the 'left_eye' - and 'right_eye' after a horizontal flip. + flip_permutation: integer list or rank 1 int32 tensor containing the + keypoint flip permutation. This specifies the mapping from original + keypoint indices to the flipped keypoint indices. This is used primarily + for keypoints that are not reflection invariant. E.g. Suppose there are 3 + keypoints representing ['head', 'right_eye', 'left_eye'], then a logical + choice for flip_permutation might be [0, 2, 1] since we want to swap the + 'left_eye' and 'right_eye' after a horizontal flip. + Default to None or empty list to keep the original order after flip. scope: name scope. Returns: @@ -241,7 +242,8 @@ def flip_horizontal(keypoints, flip_point, flip_permutation, scope=None): """ with tf.name_scope(scope, 'FlipHorizontal'): keypoints = tf.transpose(keypoints, [1, 0, 2]) - keypoints = tf.gather(keypoints, flip_permutation) + if flip_permutation: + keypoints = tf.gather(keypoints, flip_permutation) v, u = tf.split(value=keypoints, num_or_size_splits=2, axis=2) u = flip_point * 2.0 - u new_keypoints = tf.concat([v, u], 2) @@ -249,7 +251,7 @@ def flip_horizontal(keypoints, flip_point, flip_permutation, scope=None): return new_keypoints -def flip_vertical(keypoints, flip_point, flip_permutation, scope=None): +def flip_vertical(keypoints, flip_point, flip_permutation=None, scope=None): """Flips the keypoints vertically around the flip_point. This operation flips the y coordinate for each keypoint around the flip_point @@ -259,13 +261,14 @@ def flip_vertical(keypoints, flip_point, flip_permutation, scope=None): keypoints: a tensor of shape [num_instances, num_keypoints, 2] flip_point: (float) scalar tensor representing the y coordinate to flip the keypoints around. - flip_permutation: rank 1 int32 tensor containing the keypoint flip - permutation. This specifies the mapping from original keypoint indices - to the flipped keypoint indices. This is used primarily for keypoints - that are not reflection invariant. E.g. Suppose there are 3 keypoints - representing ['head', 'right_eye', 'left_eye'], then a logical choice for - flip_permutation might be [0, 2, 1] since we want to swap the 'left_eye' - and 'right_eye' after a horizontal flip. + flip_permutation: integer list or rank 1 int32 tensor containing the + keypoint flip permutation. This specifies the mapping from original + keypoint indices to the flipped keypoint indices. This is used primarily + for keypoints that are not reflection invariant. E.g. Suppose there are 3 + keypoints representing ['head', 'right_eye', 'left_eye'], then a logical + choice for flip_permutation might be [0, 2, 1] since we want to swap the + 'left_eye' and 'right_eye' after a horizontal flip. + Default to None or empty list to keep the original order after flip. 
scope: name scope. Returns: @@ -273,7 +276,8 @@ def flip_vertical(keypoints, flip_point, flip_permutation, scope=None): """ with tf.name_scope(scope, 'FlipVertical'): keypoints = tf.transpose(keypoints, [1, 0, 2]) - keypoints = tf.gather(keypoints, flip_permutation) + if flip_permutation: + keypoints = tf.gather(keypoints, flip_permutation) v, u = tf.split(value=keypoints, num_or_size_splits=2, axis=2) v = flip_point * 2.0 - v new_keypoints = tf.concat([v, u], 2) @@ -281,18 +285,24 @@ def flip_vertical(keypoints, flip_point, flip_permutation, scope=None): return new_keypoints -def rot90(keypoints, scope=None): +def rot90(keypoints, rotation_permutation=None, scope=None): """Rotates the keypoints counter-clockwise by 90 degrees. Args: keypoints: a tensor of shape [num_instances, num_keypoints, 2] + rotation_permutation: integer list or rank 1 int32 tensor containing the + keypoint flip permutation. This specifies the mapping from original + keypoint indices to the rotated keypoint indices. This is used primarily + for keypoints that are not rotation invariant. + Default to None or empty list to keep the original order after rotation. scope: name scope. - Returns: new_keypoints: a tensor of shape [num_instances, num_keypoints, 2] """ with tf.name_scope(scope, 'Rot90'): keypoints = tf.transpose(keypoints, [1, 0, 2]) + if rotation_permutation: + keypoints = tf.gather(keypoints, rotation_permutation) v, u = tf.split(value=keypoints[:, :, ::-1], num_or_size_splits=2, axis=2) v = 1.0 - v new_keypoints = tf.concat([v, u], 2) diff --git a/research/object_detection/core/keypoint_ops_test.py b/research/object_detection/core/keypoint_ops_test.py index 695e8fa1c..bbdcf0194 100644 --- a/research/object_detection/core/keypoint_ops_test.py +++ b/research/object_detection/core/keypoint_ops_test.py @@ -180,6 +180,21 @@ class KeypointOpsTest(test_case.TestCase): [[0.1, 0.1], [0.2, 0.2], [0.3, 0.3]], [[0.4, 0.4], [0.5, 0.5], [0.6, 0.6]] ]) + expected_keypoints = tf.constant([ + [[0.1, 0.9], [0.2, 0.8], [0.3, 0.7]], + [[0.4, 0.6], [0.5, 0.5], [0.6, 0.4]], + ]) + output = keypoint_ops.flip_horizontal(keypoints, 0.5) + return output, expected_keypoints + + output, expected_keypoints = self.execute(graph_fn, []) + self.assertAllClose(output, expected_keypoints) + + def test_flip_horizontal_permutation(self): + + def graph_fn(): + keypoints = tf.constant([[[0.1, 0.1], [0.2, 0.2], [0.3, 0.3]], + [[0.4, 0.4], [0.5, 0.5], [0.6, 0.6]]]) flip_permutation = [0, 2, 1] expected_keypoints = tf.constant([ @@ -197,6 +212,22 @@ class KeypointOpsTest(test_case.TestCase): [[0.1, 0.1], [0.2, 0.2], [0.3, 0.3]], [[0.4, 0.4], [0.5, 0.5], [0.6, 0.6]] ]) + + expected_keypoints = tf.constant([ + [[0.9, 0.1], [0.8, 0.2], [0.7, 0.3]], + [[0.6, 0.4], [0.5, 0.5], [0.4, 0.6]], + ]) + output = keypoint_ops.flip_vertical(keypoints, 0.5) + return output, expected_keypoints + + output, expected_keypoints = self.execute(graph_fn, []) + self.assertAllClose(output, expected_keypoints) + + def test_flip_vertical_permutation(self): + + def graph_fn(): + keypoints = tf.constant([[[0.1, 0.1], [0.2, 0.2], [0.3, 0.3]], + [[0.4, 0.4], [0.5, 0.5], [0.6, 0.6]]]) flip_permutation = [0, 2, 1] expected_keypoints = tf.constant([ @@ -223,6 +254,23 @@ class KeypointOpsTest(test_case.TestCase): output, expected_keypoints = self.execute(graph_fn, []) self.assertAllClose(output, expected_keypoints) + def test_rot90_permutation(self): + + def graph_fn(): + keypoints = tf.constant([[[0.1, 0.1], [0.2, 0.2], [0.3, 0.3]], + [[0.4, 0.6], [0.5, 0.6], [0.6, 0.7]]]) + 
rot_permutation = [0, 2, 1] + expected_keypoints = tf.constant([ + [[0.9, 0.1], [0.7, 0.3], [0.8, 0.2]], + [[0.4, 0.4], [0.3, 0.6], [0.4, 0.5]], + ]) + output = keypoint_ops.rot90(keypoints, + rotation_permutation=rot_permutation) + return output, expected_keypoints + + output, expected_keypoints = self.execute(graph_fn, []) + self.assertAllClose(output, expected_keypoints) + def test_keypoint_weights_from_visibilities(self): def graph_fn(): keypoint_visibilities = tf.constant([ diff --git a/research/object_detection/core/preprocessor.py b/research/object_detection/core/preprocessor.py index 8b8fdff5e..a1e7ed028 100644 --- a/research/object_detection/core/preprocessor.py +++ b/research/object_detection/core/preprocessor.py @@ -569,12 +569,11 @@ def random_horizontal_flip(image, keypoints=None, keypoint_visibilities=None, keypoint_flip_permutation=None, + probability=0.5, seed=None, preprocess_vars_cache=None): """Randomly flips the image and detections horizontally. - The probability of flipping the image is 50%. - Args: image: rank 3 float32 tensor with shape [height, width, channels]. boxes: (optional) rank 2 float32 tensor with shape [N, 4] @@ -592,6 +591,7 @@ def random_horizontal_flip(image, [num_instances, num_keypoints]. keypoint_flip_permutation: rank 1 int32 tensor containing the keypoint flip permutation. + probability: the probability of performing this augmentation. seed: random seed preprocess_vars_cache: PreprocessorCache object that records previously performed augmentations. Updated in-place. If this @@ -636,7 +636,7 @@ def random_horizontal_flip(image, generator_func, preprocessor_cache.PreprocessorCache.HORIZONTAL_FLIP, preprocess_vars_cache) - do_a_flip_random = tf.greater(do_a_flip_random, 0.5) + do_a_flip_random = tf.less(do_a_flip_random, probability) # flip image image = tf.cond(do_a_flip_random, lambda: _flip_image(image), lambda: image) @@ -682,6 +682,7 @@ def random_vertical_flip(image, masks=None, keypoints=None, keypoint_flip_permutation=None, + probability=0.5, seed=None, preprocess_vars_cache=None): """Randomly flips the image and detections vertically. @@ -703,6 +704,7 @@ def random_vertical_flip(image, normalized coordinates. keypoint_flip_permutation: rank 1 int32 tensor containing the keypoint flip permutation. + probability: the probability of performing this augmentation. seed: random seed preprocess_vars_cache: PreprocessorCache object that records previously performed augmentations. Updated in-place. If this @@ -743,7 +745,7 @@ def random_vertical_flip(image, do_a_flip_random = _get_or_create_preprocess_rand_vars( generator_func, preprocessor_cache.PreprocessorCache.VERTICAL_FLIP, preprocess_vars_cache) - do_a_flip_random = tf.greater(do_a_flip_random, 0.5) + do_a_flip_random = tf.less(do_a_flip_random, probability) # flip image image = tf.cond(do_a_flip_random, lambda: _flip_image(image), lambda: image) @@ -777,6 +779,8 @@ def random_rotation90(image, boxes=None, masks=None, keypoints=None, + keypoint_rot_permutation=None, + probability=0.5, seed=None, preprocess_vars_cache=None): """Randomly rotates the image and detections 90 degrees counter-clockwise. @@ -799,6 +803,9 @@ def random_rotation90(image, keypoints: (optional) rank 3 float32 tensor with shape [num_instances, num_keypoints, 2]. The keypoints are in y-x normalized coordinates. + keypoint_rot_permutation: rank 1 int32 tensor containing the keypoint flip + permutation. + probability: the probability of performing this augmentation. 
seed: random seed preprocess_vars_cache: PreprocessorCache object that records previously performed augmentations. Updated in-place. If this @@ -833,7 +840,7 @@ def random_rotation90(image, do_a_rot90_random = _get_or_create_preprocess_rand_vars( generator_func, preprocessor_cache.PreprocessorCache.ROTATION90, preprocess_vars_cache) - do_a_rot90_random = tf.greater(do_a_rot90_random, 0.5) + do_a_rot90_random = tf.less(do_a_rot90_random, probability) # flip image image = tf.cond(do_a_rot90_random, lambda: _rot90_image(image), @@ -856,7 +863,7 @@ def random_rotation90(image, if keypoints is not None: keypoints = tf.cond( do_a_rot90_random, - lambda: keypoint_ops.rot90(keypoints), + lambda: keypoint_ops.rot90(keypoints, keypoint_rot_permutation), lambda: keypoints) result.append(keypoints) diff --git a/research/object_detection/core/preprocessor_test.py b/research/object_detection/core/preprocessor_test.py index fdb56c90a..5ebfe9eef 100644 --- a/research/object_detection/core/preprocessor_test.py +++ b/research/object_detection/core/preprocessor_test.py @@ -120,7 +120,10 @@ class PreprocessorTest(test_case.TestCase, parameterized.TestCase): return tf.constant(keypoints, dtype=tf.float32) def createKeypointFlipPermutation(self): - return np.array([0, 2, 1], dtype=np.int32) + return [0, 2, 1] + + def createKeypointRotPermutation(self): + return [0, 2, 1] def createTestLabels(self): labels = tf.constant([1, 2], dtype=tf.int32) @@ -912,19 +915,22 @@ class PreprocessorTest(test_case.TestCase, parameterized.TestCase): test_keypoints=True) def testRunRandomRotation90WithMaskAndKeypoints(self): - preprocess_options = [(preprocessor.random_rotation90, {})] image_height = 3 image_width = 3 images = tf.random_uniform([1, image_height, image_width, 3]) boxes = self.createTestBoxes() masks = self.createTestMasks() keypoints, _ = self.createTestKeypoints() + keypoint_rot_permutation = self.createKeypointRotPermutation() tensor_dict = { fields.InputDataFields.image: images, fields.InputDataFields.groundtruth_boxes: boxes, fields.InputDataFields.groundtruth_instance_masks: masks, fields.InputDataFields.groundtruth_keypoints: keypoints } + preprocess_options = [(preprocessor.random_rotation90, { + 'keypoint_rot_permutation': keypoint_rot_permutation + })] preprocessor_arg_map = preprocessor.get_default_func_arg_map( include_instance_masks=True, include_keypoints=True) tensor_dict = preprocessor.preprocess( diff --git a/research/object_detection/dockerfiles/1.15/Dockerfile b/research/object_detection/dockerfiles/1.15/Dockerfile new file mode 100644 index 000000000..76c0c1f07 --- /dev/null +++ b/research/object_detection/dockerfiles/1.15/Dockerfile @@ -0,0 +1,44 @@ +FROM tensorflow/tensorflow:1.15.2-gpu-py3 + +ARG DEBIAN_FRONTEND=noninteractive + +# Install apt dependencies +RUN apt-get update && apt-get install -y \ + git \ + gpg-agent \ + python3-cairocffi \ + protobuf-compiler \ + python3-pil \ + python3-lxml \ + python3-tk \ + wget + +# Install gcloud and gsutil commands +# https://cloud.google.com/sdk/docs/quickstart-debian-ubuntu +RUN export CLOUD_SDK_REPO="cloud-sdk-$(lsb_release -c -s)" && \ + echo "deb http://packages.cloud.google.com/apt $CLOUD_SDK_REPO main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && \ + curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - && \ + apt-get update -y && apt-get install google-cloud-sdk -y + +# Add new user to avoid running as root +RUN useradd -ms /bin/bash tensorflow +USER tensorflow +WORKDIR /home/tensorflow + +# Install pip 
dependencies
+RUN pip3 install --user absl-py
+RUN pip3 install --user contextlib2
+RUN pip3 install --user Cython
+RUN pip3 install --user jupyter
+RUN pip3 install --user matplotlib
+RUN pip3 install --user pycocotools
+RUN pip3 install --user tf-slim
+
+# Copy this version of the model garden into the image
+COPY --chown=tensorflow . /home/tensorflow/models
+
+# Compile protobuf configs
+RUN (cd /home/tensorflow/models/research/ && protoc object_detection/protos/*.proto --python_out=.)
+
+ENV PYTHONPATH $PYTHONPATH:/home/tensorflow/models/research/:/home/tensorflow/models/research/slim
+ENV TF_CPP_MIN_LOG_LEVEL 3
diff --git a/research/object_detection/dockerfiles/1.15/README.md b/research/object_detection/dockerfiles/1.15/README.md
new file mode 100644
index 000000000..5e3e6d052
--- /dev/null
+++ b/research/object_detection/dockerfiles/1.15/README.md
@@ -0,0 +1,11 @@
+# Tensorflow Object Detection on Docker
+
+These instructions are experimental.
+
+## Building and running:
+
+```bash
+# From the root of the git repository
+docker build -f research/object_detection/dockerfiles/1.15/Dockerfile -t od .
+docker run -it od
+```
diff --git a/research/object_detection/dockerfiles/2.2/Dockerfile b/research/object_detection/dockerfiles/2.2/Dockerfile
new file mode 100644
index 000000000..c3d5e2e8f
--- /dev/null
+++ b/research/object_detection/dockerfiles/2.2/Dockerfile
@@ -0,0 +1,44 @@
+FROM tensorflow/tensorflow:2.2.0-gpu
+
+ARG DEBIAN_FRONTEND=noninteractive
+
+# Install apt dependencies
+RUN apt-get update && apt-get install -y \
+    git \
+    gpg-agent \
+    python3-cairocffi \
+    protobuf-compiler \
+    python3-pil \
+    python3-lxml \
+    python3-tk \
+    wget
+
+# Install gcloud and gsutil commands
+# https://cloud.google.com/sdk/docs/quickstart-debian-ubuntu
+RUN export CLOUD_SDK_REPO="cloud-sdk-$(lsb_release -c -s)" && \
+    echo "deb http://packages.cloud.google.com/apt $CLOUD_SDK_REPO main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && \
+    curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - && \
+    apt-get update -y && apt-get install google-cloud-sdk -y
+
+# Add new user to avoid running as root
+RUN useradd -ms /bin/bash tensorflow
+USER tensorflow
+WORKDIR /home/tensorflow
+
+# Install pip dependencies
+RUN pip3 install --user absl-py
+RUN pip3 install --user contextlib2
+RUN pip3 install --user Cython
+RUN pip3 install --user jupyter
+RUN pip3 install --user matplotlib
+RUN pip3 install --user pycocotools
+RUN pip3 install --user tf-slim
+
+# Copy this version of the model garden into the image
+COPY --chown=tensorflow . /home/tensorflow/models
+
+# Compile protobuf configs
+RUN (cd /home/tensorflow/models/research/ && protoc object_detection/protos/*.proto --python_out=.)
+
+ENV PYTHONPATH $PYTHONPATH:/home/tensorflow/models/research/:/home/tensorflow/models/research/slim
+ENV TF_CPP_MIN_LOG_LEVEL 3
diff --git a/research/object_detection/dockerfiles/2.2/README.md b/research/object_detection/dockerfiles/2.2/README.md
new file mode 100644
index 000000000..58b58db85
--- /dev/null
+++ b/research/object_detection/dockerfiles/2.2/README.md
@@ -0,0 +1,11 @@
+# Tensorflow Object Detection on Docker
+
+These instructions are experimental.
+
+## Building and running:
+
+```bash
+# From the root of the git repository
+docker build -f research/object_detection/dockerfiles/2.2/Dockerfile -t od .
+docker run -it od +``` diff --git a/research/object_detection/model_lib_tf2_test.py b/research/object_detection/model_lib_tf2_test.py index 8c6d96172..4e9b5a82e 100644 --- a/research/object_detection/model_lib_tf2_test.py +++ b/research/object_detection/model_lib_tf2_test.py @@ -24,9 +24,9 @@ import unittest import numpy as np import six import tensorflow.compat.v1 as tf +import tensorflow.compat.v2 as tf2 from object_detection import inputs -from object_detection import model_hparams from object_detection import model_lib_v2 from object_detection.builders import model_builder from object_detection.core import model @@ -82,24 +82,25 @@ class ModelLibTest(tf.test.TestCase): def test_train_loop_then_eval_loop(self): """Tests that Estimator and input function are constructed correctly.""" - hparams = model_hparams.create_hparams( - hparams_overrides='load_pretrained=false') + model_dir = tf.test.get_temp_dir() pipeline_config_path = get_pipeline_config_path(MODEL_NAME_FOR_TEST) + new_pipeline_config_path = os.path.join(model_dir, 'new_pipeline.config') + config_util.clear_fine_tune_checkpoint(pipeline_config_path, + new_pipeline_config_path) config_kwarg_overrides = _get_config_kwarg_overrides() - model_dir = tf.test.get_temp_dir() train_steps = 2 - model_lib_v2.train_loop( - hparams, - pipeline_config_path, - model_dir=model_dir, - train_steps=train_steps, - checkpoint_every_n=1, - **config_kwarg_overrides) + strategy = tf2.distribute.OneDeviceStrategy(device='/cpu:0') + with strategy.scope(): + model_lib_v2.train_loop( + new_pipeline_config_path, + model_dir=model_dir, + train_steps=train_steps, + checkpoint_every_n=1, + **config_kwarg_overrides) model_lib_v2.eval_continuously( - hparams, - pipeline_config_path, + new_pipeline_config_path, model_dir=model_dir, checkpoint_dir=model_dir, train_steps=train_steps, @@ -148,21 +149,24 @@ class ModelCheckpointTest(tf.test.TestCase): def test_checkpoint_max_to_keep(self): """Test that only the most recent checkpoints are kept.""" + strategy = tf2.distribute.OneDeviceStrategy(device='/cpu:0') with mock.patch.object( model_builder, 'build', autospec=True) as mock_builder: - mock_builder.return_value = SimpleModel() - - hparams = model_hparams.create_hparams( - hparams_overrides='load_pretrained=false') + with strategy.scope(): + mock_builder.return_value = SimpleModel() + model_dir = tempfile.mkdtemp(dir=self.get_temp_dir()) pipeline_config_path = get_pipeline_config_path(MODEL_NAME_FOR_TEST) + new_pipeline_config_path = os.path.join(model_dir, 'new_pipeline.config') + config_util.clear_fine_tune_checkpoint(pipeline_config_path, + new_pipeline_config_path) config_kwarg_overrides = _get_config_kwarg_overrides() - model_dir = tempfile.mkdtemp(dir=self.get_temp_dir()) - model_lib_v2.train_loop( - hparams, pipeline_config_path, model_dir=model_dir, - train_steps=20, checkpoint_every_n=2, checkpoint_max_to_keep=3, - **config_kwarg_overrides - ) + with strategy.scope(): + model_lib_v2.train_loop( + new_pipeline_config_path, model_dir=model_dir, + train_steps=20, checkpoint_every_n=2, checkpoint_max_to_keep=3, + **config_kwarg_overrides + ) ckpt_files = tf.io.gfile.glob(os.path.join(model_dir, 'ckpt-*.index')) self.assertEqual(len(ckpt_files), 3, '{} not of length 3.'.format(ckpt_files)) @@ -221,3 +225,5 @@ class CheckpointV2Test(tf.test.TestCase): unpad_groundtruth_tensors=True) +if __name__ == '__main__': + tf.test.main() diff --git a/research/object_detection/model_lib_v2.py b/research/object_detection/model_lib_v2.py index 29eb5a295..6a764d17a 100644 
--- a/research/object_detection/model_lib_v2.py +++ b/research/object_detection/model_lib_v2.py @@ -392,14 +392,12 @@ def clean_temporary_directories(strategy, filepath): def train_loop( - hparams, pipeline_config_path, model_dir, config_override=None, train_steps=None, use_tpu=False, save_final_config=False, - export_to_tpu=None, checkpoint_every_n=1000, checkpoint_max_to_keep=7, **kwargs): @@ -417,7 +415,6 @@ def train_loop( 8. Logs the training metrics as TensorBoard summaries. Args: - hparams: A `HParams`. pipeline_config_path: A path to a pipeline config file. model_dir: The directory to save checkpoints and summaries to. @@ -428,10 +425,6 @@ def train_loop( use_tpu: Boolean, whether training and evaluation should run on TPU. save_final_config: Whether to save final config (obtained after applying overrides) to `model_dir`. - export_to_tpu: When use_tpu and export_to_tpu are true, - `export_savedmodel()` exports a metagraph for serving on TPU besides the - one on CPU. If export_to_tpu is not provided, we will look for it in - hparams too. checkpoint_every_n: Checkpoint every n training steps. checkpoint_max_to_keep: @@ -453,7 +446,7 @@ def train_loop( 'use_bfloat16': configs['train_config'].use_bfloat16 and use_tpu }) configs = merge_external_params_with_configs( - configs, hparams, kwargs_dict=kwargs) + configs, None, kwargs_dict=kwargs) model_config = configs['model'] train_config = configs['train_config'] train_input_config = configs['train_input_config'] @@ -468,33 +461,12 @@ def train_loop( if train_steps is None and train_config.num_steps != 0: train_steps = train_config.num_steps - # Read export_to_tpu from hparams if not passed. - if export_to_tpu is None: - export_to_tpu = hparams.get('export_to_tpu', False) - tf.logging.info( - 'train_loop: use_tpu %s, export_to_tpu %s', use_tpu, - export_to_tpu) - if kwargs['use_bfloat16']: tf.compat.v2.keras.mixed_precision.experimental.set_policy('mixed_bfloat16') - # Parse the checkpoint fine tuning configs - if hparams.load_pretrained: - fine_tune_checkpoint_path = train_config.fine_tune_checkpoint - else: - fine_tune_checkpoint_path = None load_all_detection_checkpoint_vars = ( train_config.load_all_detection_checkpoint_vars) - # TODO(kaftan) (or anyone else): move this piece of config munging to - ## utils/config_util.py - if not train_config.fine_tune_checkpoint_type: - # train_config.from_detection_checkpoint field is deprecated. For - # backward compatibility, set train_config.fine_tune_checkpoint_type - # based on train_config.from_detection_checkpoint. - if train_config.from_detection_checkpoint: - train_config.fine_tune_checkpoint_type = 'detection' - else: - train_config.fine_tune_checkpoint_type = 'classification' + config_util.update_fine_tune_checkpoint_type(train_config) fine_tune_checkpoint_type = train_config.fine_tune_checkpoint_type fine_tune_checkpoint_version = train_config.fine_tune_checkpoint_version @@ -556,8 +528,9 @@ def train_loop( with tf.compat.v2.summary.record_if( lambda: global_step % num_steps_per_iteration == 0): # Load a fine-tuning checkpoint. 
- if fine_tune_checkpoint_path: - load_fine_tune_checkpoint(detection_model, fine_tune_checkpoint_path, + if train_config.fine_tune_checkpoint: + load_fine_tune_checkpoint(detection_model, + train_config.fine_tune_checkpoint, fine_tune_checkpoint_type, fine_tune_checkpoint_version, load_all_detection_checkpoint_vars, @@ -841,7 +814,6 @@ def eager_eval_loop( def eval_continuously( - hparams, pipeline_config_path, config_override=None, train_steps=None, @@ -850,7 +822,6 @@ def eval_continuously( use_tpu=False, override_eval_num_epochs=True, postprocess_on_cpu=False, - export_to_tpu=None, model_dir=None, checkpoint_dir=None, wait_interval=180, @@ -863,7 +834,6 @@ def eval_continuously( on the evaluation data. Args: - hparams: A `HParams`. pipeline_config_path: A path to a pipeline config file. config_override: A pipeline_pb2.TrainEvalPipelineConfig text proto to override the config from `pipeline_config_path`. @@ -879,10 +849,6 @@ def eval_continuously( eval_input. postprocess_on_cpu: When use_tpu and postprocess_on_cpu are true, postprocess is scheduled on the host cpu. - export_to_tpu: When use_tpu and export_to_tpu are true, - `export_savedmodel()` exports a metagraph for serving on TPU besides the - one on CPU. If export_to_tpu is not provided, we will look for it in - hparams too. model_dir: Directory to output resulting evaluation summaries to. checkpoint_dir: Directory that contains the training checkpoints. wait_interval: The mimmum number of seconds to wait before checking for a @@ -910,7 +876,7 @@ def eval_continuously( tf.logging.warning( 'Forced number of epochs for all eval validations to be 1.') configs = merge_external_params_with_configs( - configs, hparams, kwargs_dict=kwargs) + configs, None, kwargs_dict=kwargs) model_config = configs['model'] train_input_config = configs['train_input_config'] eval_config = configs['eval_config'] @@ -942,12 +908,6 @@ def eval_continuously( model=detection_model) eval_inputs.append((eval_input_config.name, next_eval_input)) - # Read export_to_tpu from hparams if not passed. - if export_to_tpu is None: - export_to_tpu = hparams.get('export_to_tpu', False) - tf.logging.info('eval_continuously: use_tpu %s, export_to_tpu %s', - use_tpu, export_to_tpu) - global_step = tf.compat.v2.Variable( 0, trainable=False, dtype=tf.compat.v2.dtypes.int64) diff --git a/research/object_detection/model_main_tf2.py b/research/object_detection/model_main_tf2.py index f6832ba84..e20a8bc20 100644 --- a/research/object_detection/model_main_tf2.py +++ b/research/object_detection/model_main_tf2.py @@ -37,7 +37,6 @@ python model_main_tf2.py -- \ """ from absl import flags import tensorflow.compat.v2 as tf -from object_detection import model_hparams from object_detection import model_lib_v2 flags.DEFINE_string('pipeline_config_path', None, 'Path to pipeline config ' @@ -51,10 +50,6 @@ flags.DEFINE_integer('sample_1_of_n_eval_on_train_examples', 5, 'Will sample ' 'one of every n train input examples for evaluation, ' 'where n is provided. 
This is only used if '
                     '`eval_training_data` is True.')
-flags.DEFINE_string(
-    'hparams_overrides', None, 'Hyperparameter overrides, '
-    'represented as a string containing comma-separated '
-    'hparam_name=value pairs.')
 flags.DEFINE_string(
     'model_dir', None, 'Path to output model directory '
     'where event and checkpoint files will be written.')
@@ -80,7 +75,6 @@ def main(unused_argv):
   if FLAGS.checkpoint_dir:
     model_lib_v2.eval_continuously(
-        hparams=model_hparams.create_hparams(FLAGS.hparams_overrides),
         pipeline_config_path=FLAGS.pipeline_config_path,
         model_dir=FLAGS.model_dir,
         train_steps=FLAGS.num_train_steps,
@@ -102,11 +96,10 @@ def main(unused_argv):
     with strategy.scope():
       model_lib_v2.train_loop(
-          hparams=model_hparams.create_hparams(FLAGS.hparams_overrides),
          pipeline_config_path=FLAGS.pipeline_config_path,
          model_dir=FLAGS.model_dir,
          train_steps=FLAGS.num_train_steps,
          use_tpu=FLAGS.use_tpu)
 if __name__ == '__main__':
-  tf.app.run()
+  tf.compat.v1.app.run()
diff --git a/research/object_detection/protos/hyperparams.proto b/research/object_detection/protos/hyperparams.proto
index 2b1053877..e2fee247c 100644
--- a/research/object_detection/protos/hyperparams.proto
+++ b/research/object_detection/protos/hyperparams.proto
@@ -52,6 +52,12 @@ message Hyperparams {
   // Whether depthwise convolutions should be regularized. If this parameter is
   // NOT set then the conv hyperparams will default to the parent scope.
   optional bool regularize_depthwise = 6 [default = false];
+
+  // By default, use_bias is set to False if batch_norm is not None and
+  // batch_norm.center is True. When force_use_bias is set to True, this
+  // behavior will be overridden, and use_bias will be set to True, regardless
+  // of batch norm parameters. Note, this only applies to KerasLayerHyperparams.
+  optional bool force_use_bias = 8 [default = false];
 }
 // Proto with one-of field for regularizers.
diff --git a/research/object_detection/protos/preprocessor.proto b/research/object_detection/protos/preprocessor.proto
index aa83939f3..3201df2bd 100644
--- a/research/object_detection/protos/preprocessor.proto
+++ b/research/object_detection/protos/preprocessor.proto
@@ -57,7 +57,8 @@ message NormalizeImage {
   optional float target_maxval = 4 [default=1];
 }
-// Randomly horizontally flips the image and detections 50% of the time.
+// Randomly horizontally flips the image and detections with the specified
+// probability, default to 50% of the time.
 message RandomHorizontalFlip {
   // Specifies a mapping from the original keypoint indices to horizontally
   // flipped indices. This is used in the event that keypoints are specified,
@@ -71,10 +72,15 @@ message RandomHorizontalFlip {
   // keypoint_flip_permutation: 3
   // keypoint_flip_permutation: 5
   // keypoint_flip_permutation: 4
+  // If nothing is specified the order of keypoints will be maintained.
   repeated int32 keypoint_flip_permutation = 1;
+
+  // The probability of running this augmentation for each image.
+  optional float probability = 2 [default=0.5];
 }
-// Randomly vertically flips the image and detections 50% of the time.
+// Randomly vertically flips the image and detections with the specified
+// probability, default to 50% of the time.
 message RandomVerticalFlip {
   // Specifies a mapping from the original keypoint indices to vertically
   // flipped indices.
This is used in the event that keypoints are specified, @@ -89,11 +95,23 @@ message RandomVerticalFlip { // keypoint_flip_permutation: 5 // keypoint_flip_permutation: 4 repeated int32 keypoint_flip_permutation = 1; + + // The probability of running this augmentation for each image. + optional float probability = 2 [default=0.5]; } // Randomly rotates the image and detections by 90 degrees counter-clockwise -// 50% of the time. -message RandomRotation90 {} +// with the specified probability, default to 50% of the time. +message RandomRotation90 { + // Specifies a mapping from the original keypoint indices to 90 degree counter + // clockwise indices. This is used in the event that keypoints are specified, + // in which case when the image is rotated the keypoints might need to be + // permuted. + repeated int32 keypoint_rot_permutation = 1; + + // The probability of running this augmentation for each image. + optional float probability = 2 [default=0.5]; +} // Randomly scales the values of all pixels in the image by some constant value // between [minval, maxval], then clip the value to a range between [0, 1.0]. @@ -457,7 +475,6 @@ message SSDRandomCropPadFixedAspectRatio { // Converts class logits to softmax optionally scaling the values by temperature // first. message ConvertClassLogitsToSoftmax { - // Scale to use on logits before applying softmax. optional float temperature = 1 [default=1.0]; } @@ -472,12 +489,10 @@ message RandomSelfConcatImage { // Apply an Autoaugment policy to the image and bounding boxes. message AutoAugmentImage { - // What AutoAugment policy to apply to the Image optional string policy_name = 1 [default="v0"]; } - // Randomly drops ground truth boxes for a label with some probability. message DropLabelProbabilistically { // The label that should be dropped. This corresponds to one of the entries @@ -487,7 +502,6 @@ message DropLabelProbabilistically { optional float drop_probability = 2 [default = 1.0]; } - //Remap a set of labels to a new label. message RemapLabels { // Labels to be remapped. diff --git a/research/object_detection/utils/config_util.py b/research/object_detection/utils/config_util.py index 71185a5a6..8dd405dff 100644 --- a/research/object_detection/utils/config_util.py +++ b/research/object_detection/utils/config_util.py @@ -142,6 +142,34 @@ def get_configs_from_pipeline_file(pipeline_config_path, config_override=None): return create_configs_from_pipeline_proto(pipeline_config) +def clear_fine_tune_checkpoint(pipeline_config_path, + new_pipeline_config_path): + """Clears fine_tune_checkpoint and writes a new pipeline config file.""" + configs = get_configs_from_pipeline_file(pipeline_config_path) + configs["train_config"].fine_tune_checkpoint = "" + pipeline_proto = create_pipeline_proto_from_configs(configs) + with tf.gfile.Open(new_pipeline_config_path, "wb") as f: + f.write(text_format.MessageToString(pipeline_proto)) + + +def update_fine_tune_checkpoint_type(train_config): + """Set `fine_tune_checkpoint_type` using `from_detection_checkpoint`. + + `train_config.from_detection_checkpoint` field is deprecated. For backward + compatibility, this function sets `train_config.fine_tune_checkpoint_type` + based on `train_config.from_detection_checkpoint`. + + Args: + train_config: train_pb2.TrainConfig proto object. 
+ + """ + if not train_config.fine_tune_checkpoint_type: + if train_config.from_detection_checkpoint: + train_config.fine_tune_checkpoint_type = "detection" + else: + train_config.fine_tune_checkpoint_type = "classification" + + def create_configs_from_pipeline_proto(pipeline_config): """Creates a configs dictionary from pipeline_pb2.TrainEvalPipelineConfig. -- GitLab From aca5129458f0cced4e32875a9742bfbbccc202ec Mon Sep 17 00:00:00 2001 From: "A. Unique TensorFlower" Date: Wed, 24 Jun 2020 13:30:01 -0700 Subject: [PATCH 57/79] Internal change PiperOrigin-RevId: 318129893 --- official/README.md | 6 +- official/vision/detection/README.md | 34 ++ .../vision/detection/configs/base_config.py | 7 +- .../modeling/architecture/factory.py | 6 +- .../modeling/architecture/nn_blocks.py | 318 +++++++++++ .../modeling/architecture/spinenet.py | 503 ++++++++++++++++++ official/vision/segmentation/README.md | 188 +++++++ official/vision/segmentation/__init__.py | 0 official/vision/segmentation/convert_lits.py | 269 ++++++++++ .../segmentation/convert_lits_nii_to_npy.py | 58 ++ official/vision/segmentation/unet_config.py | 76 +++ official/vision/segmentation/unet_data.py | 175 ++++++ official/vision/segmentation/unet_main.py | 350 ++++++++++++ .../vision/segmentation/unet_main_test.py | 248 +++++++++ official/vision/segmentation/unet_metrics.py | 279 ++++++++++ official/vision/segmentation/unet_model.py | 238 +++++++++ 16 files changed, 2751 insertions(+), 4 deletions(-) create mode 100644 official/vision/detection/modeling/architecture/nn_blocks.py create mode 100644 official/vision/detection/modeling/architecture/spinenet.py create mode 100644 official/vision/segmentation/README.md create mode 100644 official/vision/segmentation/__init__.py create mode 100644 official/vision/segmentation/convert_lits.py create mode 100644 official/vision/segmentation/convert_lits_nii_to_npy.py create mode 100644 official/vision/segmentation/unet_config.py create mode 100644 official/vision/segmentation/unet_data.py create mode 100644 official/vision/segmentation/unet_main.py create mode 100644 official/vision/segmentation/unet_main_test.py create mode 100644 official/vision/segmentation/unet_metrics.py create mode 100644 official/vision/segmentation/unet_model.py diff --git a/official/README.md b/official/README.md index 2b3f2dd76..d0e2ef81b 100644 --- a/official/README.md +++ b/official/README.md @@ -19,9 +19,10 @@ In the near future, we will add: * State-of-the-art language understanding models: More members in Transformer family -* Start-of-the-art image classification models: +* State-of-the-art image classification models: EfficientNet, MnasNet, and variants -* A set of excellent objection detection models. 
+* State-of-the-art objection detection and instance segmentation models: + RetinaNet, Mask R-CNN, SpineNet, and variants ## Table of Contents @@ -52,6 +53,7 @@ In the near future, we will add: | [RetinaNet](vision/detection) | [Focal Loss for Dense Object Detection](https://arxiv.org/abs/1708.02002) | | [Mask R-CNN](vision/detection) | [Mask R-CNN](https://arxiv.org/abs/1703.06870) | | [ShapeMask](vision/detection) | [ShapeMask: Learning to Segment Novel Objects by Refining Shape Priors](https://arxiv.org/abs/1904.03239) | +| [SpineNet](vision/detection) | [SpineNet: Learning Scale-Permuted Backbone for Recognition and Localization](https://arxiv.org/abs/1912.05027) | ### Natural Language Processing diff --git a/official/vision/detection/README.md b/official/vision/detection/README.md index 53134ec55..d6cb5d464 100644 --- a/official/vision/detection/README.md +++ b/official/vision/detection/README.md @@ -48,6 +48,22 @@ so the checkpoints are not compatible. We will unify the implementation soon. +### Train a SpineNet-49 based RetinaNet. + +```bash +TPU_NAME="" +MODEL_DIR="" +TRAIN_FILE_PATTERN="" +EVAL_FILE_PATTERN="" +VAL_JSON_FILE="" +python3 ~/models/official/vision/detection/main.py \ + --strategy_type=tpu \ + --tpu="${TPU_NAME?}" \ + --model_dir="${MODEL_DIR?}" \ + --mode=train \ + --params_override="{ type: retinanet, architecture: {backbone: spinenet, multilevel_features: identity}, spinenet: {model_id: 49}, train_file_pattern: ${TRAIN_FILE_PATTERN?} }, eval: { val_json_file: ${VAL_JSON_FILE?}, eval_file_pattern: ${EVAL_FILE_PATTERN?} } }" +``` + ### Train a custom RetinaNet using the config file. @@ -163,6 +179,24 @@ so the checkpoints are not compatible. We will unify the implementation soon. +### Train a SpineNet-49 based Mask R-CNN. + +```bash +TPU_NAME="" +MODEL_DIR="" +TRAIN_FILE_PATTERN="" +EVAL_FILE_PATTERN="" +VAL_JSON_FILE="" +python3 ~/models/official/vision/detection/main.py \ + --strategy_type=tpu \ + --tpu="${TPU_NAME?}" \ + --model_dir="${MODEL_DIR?}" \ + --mode=train \ + --model=mask_rcnn \ + --params_override="{architecture: {backbone: spinenet, multilevel_features: identity}, spinenet: {model_id: 49}, train_file_pattern: ${TRAIN_FILE_PATTERN?} }, eval: { val_json_file: ${VAL_JSON_FILE?}, eval_file_pattern: ${EVAL_FILE_PATTERN?} } }" +``` + + ### Train a custom Mask R-CNN using the config file. First, create a YAML config file, e.g. *my_maskrcnn.yaml*. diff --git a/official/vision/detection/configs/base_config.py b/official/vision/detection/configs/base_config.py index 0a4e2f5fb..6e2859e71 100644 --- a/official/vision/detection/configs/base_config.py +++ b/official/vision/detection/configs/base_config.py @@ -1,4 +1,4 @@ -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
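For readers decoding the `--params_override` strings in the SpineNet commands
above: the override is a nested structure whose keys mirror the entries in
`base_config.py`. A minimal sketch of the RetinaNet case as a plain Python dict
follows; only the keys that appear in the command come from the document, and
the placeholder comments are assumptions.

```python
# Nested override implied by the SpineNet-49 RetinaNet command above.
params_override = {
    'type': 'retinanet',
    'architecture': {
        'backbone': 'spinenet',             # select the SpineNet backbone
        'multilevel_features': 'identity',  # SpineNet already emits multilevel endpoints
    },
    'spinenet': {
        'model_id': '49',                   # one of 49S, 49, 96, 143, 190
    },
    # train_file_pattern, eval_file_pattern and val_json_file are passed in the
    # same string; their exact nesting follows base_config.py.
}
```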
@@ -17,10 +17,12 @@ BACKBONES = [ 'resnet', + 'spinenet', ] MULTILEVEL_FEATURES = [ 'fpn', + 'identity', ] # pylint: disable=line-too-long @@ -118,6 +120,9 @@ BASE_CFG = { 'resnet': { 'resnet_depth': 50, }, + 'spinenet': { + 'model_id': '49', + }, 'fpn': { 'fpn_feat_dims': 256, 'use_separable_conv': False, diff --git a/official/vision/detection/modeling/architecture/factory.py b/official/vision/detection/modeling/architecture/factory.py index ed5647d6f..fa563c9ed 100644 --- a/official/vision/detection/modeling/architecture/factory.py +++ b/official/vision/detection/modeling/architecture/factory.py @@ -1,4 +1,4 @@ -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -23,6 +23,7 @@ from official.vision.detection.modeling.architecture import heads from official.vision.detection.modeling.architecture import identity from official.vision.detection.modeling.architecture import nn_ops from official.vision.detection.modeling.architecture import resnet +from official.vision.detection.modeling.architecture import spinenet def norm_activation_generator(params): @@ -42,6 +43,9 @@ def backbone_generator(params): activation=params.norm_activation.activation, norm_activation=norm_activation_generator( params.norm_activation)) + elif params.architecture.backbone == 'spinenet': + spinenet_params = params.spinenet + backbone_fn = spinenet.SpineNetBuilder(model_id=spinenet_params.model_id) else: raise ValueError('Backbone model `{}` is not supported.' .format(params.architecture.backbone)) diff --git a/official/vision/detection/modeling/architecture/nn_blocks.py b/official/vision/detection/modeling/architecture/nn_blocks.py new file mode 100644 index 000000000..c94a079f9 --- /dev/null +++ b/official/vision/detection/modeling/architecture/nn_blocks.py @@ -0,0 +1,318 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Contains common building blocks for neural networks.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import tensorflow as tf + +from official.modeling import tf_utils + + +@tf.keras.utils.register_keras_serializable(package='Vision') +class ResidualBlock(tf.keras.layers.Layer): + """A residual block.""" + + def __init__(self, + filters, + strides, + use_projection=False, + kernel_initializer='VarianceScaling', + kernel_regularizer=None, + bias_regularizer=None, + activation='relu', + use_sync_bn=False, + norm_momentum=0.99, + norm_epsilon=0.001, + **kwargs): + """A residual block with BN after convolutions. + + Args: + filters: `int` number of filters for the first two convolutions. Note that + the third and final convolution will use 4 times as many filters. 
+ strides: `int` block stride. If greater than 1, this block will ultimately + downsample the input. + use_projection: `bool` for whether this block should use a projection + shortcut (versus the default identity shortcut). This is usually `True` + for the first block of a block group, which may change the number of + filters and the resolution. + kernel_initializer: kernel_initializer for convolutional layers. + kernel_regularizer: tf.keras.regularizers.Regularizer object for Conv2D. + Default to None. + bias_regularizer: tf.keras.regularizers.Regularizer object for Conv2d. + Default to None. + activation: `str` name of the activation function. + use_sync_bn: if True, use synchronized batch normalization. + norm_momentum: `float` normalization omentum for the moving average. + norm_epsilon: `float` small float added to variance to avoid dividing by + zero. + **kwargs: keyword arguments to be passed. + """ + super(ResidualBlock, self).__init__(**kwargs) + + self._filters = filters + self._strides = strides + self._use_projection = use_projection + self._use_sync_bn = use_sync_bn + self._activation = activation + self._kernel_initializer = kernel_initializer + self._norm_momentum = norm_momentum + self._norm_epsilon = norm_epsilon + self._kernel_regularizer = kernel_regularizer + self._bias_regularizer = bias_regularizer + + if use_sync_bn: + self._norm = tf.keras.layers.experimental.SyncBatchNormalization + else: + self._norm = tf.keras.layers.BatchNormalization + if tf.keras.backend.image_data_format() == 'channels_last': + self._bn_axis = -1 + else: + self._bn_axis = 1 + self._activation_fn = tf_utils.get_activation(activation) + + def build(self, input_shape): + if self._use_projection: + self._shortcut = tf.keras.layers.Conv2D( + filters=self._filters, + kernel_size=1, + strides=self._strides, + use_bias=False, + kernel_initializer=self._kernel_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer) + self._norm0 = self._norm( + axis=self._bn_axis, + momentum=self._norm_momentum, + epsilon=self._norm_epsilon) + + self._conv1 = tf.keras.layers.Conv2D( + filters=self._filters, + kernel_size=3, + strides=self._strides, + padding='same', + use_bias=False, + kernel_initializer=self._kernel_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer) + self._norm1 = self._norm( + axis=self._bn_axis, + momentum=self._norm_momentum, + epsilon=self._norm_epsilon) + + self._conv2 = tf.keras.layers.Conv2D( + filters=self._filters, + kernel_size=3, + strides=1, + padding='same', + use_bias=False, + kernel_initializer=self._kernel_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer) + self._norm2 = self._norm( + axis=self._bn_axis, + momentum=self._norm_momentum, + epsilon=self._norm_epsilon) + + super(ResidualBlock, self).build(input_shape) + + def get_config(self): + config = { + 'filters': self._filters, + 'strides': self._strides, + 'use_projection': self._use_projection, + 'kernel_initializer': self._kernel_initializer, + 'kernel_regularizer': self._kernel_regularizer, + 'bias_regularizer': self._bias_regularizer, + 'activation': self._activation, + 'use_sync_bn': self._use_sync_bn, + 'norm_momentum': self._norm_momentum, + 'norm_epsilon': self._norm_epsilon + } + + base_config = super(ResidualBlock, self).get_config() + return dict(list(base_config.items()) + list(config.items())) + + def call(self, inputs): + shortcut = inputs + if self._use_projection: + 
shortcut = self._shortcut(shortcut) + shortcut = self._norm0(shortcut) + + x = self._conv1(inputs) + x = self._norm1(x) + x = self._activation_fn(x) + + x = self._conv2(x) + x = self._norm2(x) + + return self._activation_fn(x + shortcut) + + +@tf.keras.utils.register_keras_serializable(package='Vision') +class BottleneckBlock(tf.keras.layers.Layer): + """A standard bottleneck block.""" + + def __init__(self, + filters, + strides, + use_projection=False, + kernel_initializer='VarianceScaling', + kernel_regularizer=None, + bias_regularizer=None, + activation='relu', + use_sync_bn=False, + norm_momentum=0.99, + norm_epsilon=0.001, + **kwargs): + """A standard bottleneck block with BN after convolutions. + + Args: + filters: `int` number of filters for the first two convolutions. Note that + the third and final convolution will use 4 times as many filters. + strides: `int` block stride. If greater than 1, this block will ultimately + downsample the input. + use_projection: `bool` for whether this block should use a projection + shortcut (versus the default identity shortcut). This is usually `True` + for the first block of a block group, which may change the number of + filters and the resolution. + kernel_initializer: kernel_initializer for convolutional layers. + kernel_regularizer: tf.keras.regularizers.Regularizer object for Conv2D. + Default to None. + bias_regularizer: tf.keras.regularizers.Regularizer object for Conv2d. + Default to None. + activation: `str` name of the activation function. + use_sync_bn: if True, use synchronized batch normalization. + norm_momentum: `float` normalization omentum for the moving average. + norm_epsilon: `float` small float added to variance to avoid dividing by + zero. + **kwargs: keyword arguments to be passed. 
+ """ + super(BottleneckBlock, self).__init__(**kwargs) + + self._filters = filters + self._strides = strides + self._use_projection = use_projection + self._use_sync_bn = use_sync_bn + self._activation = activation + self._kernel_initializer = kernel_initializer + self._norm_momentum = norm_momentum + self._norm_epsilon = norm_epsilon + self._kernel_regularizer = kernel_regularizer + self._bias_regularizer = bias_regularizer + if use_sync_bn: + self._norm = tf.keras.layers.experimental.SyncBatchNormalization + else: + self._norm = tf.keras.layers.BatchNormalization + if tf.keras.backend.image_data_format() == 'channels_last': + self._bn_axis = -1 + else: + self._bn_axis = 1 + self._activation_fn = tf_utils.get_activation(activation) + + def build(self, input_shape): + if self._use_projection: + self._shortcut = tf.keras.layers.Conv2D( + filters=self._filters * 4, + kernel_size=1, + strides=self._strides, + use_bias=False, + kernel_initializer=self._kernel_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer) + self._norm0 = self._norm( + axis=self._bn_axis, + momentum=self._norm_momentum, + epsilon=self._norm_epsilon) + + self._conv1 = tf.keras.layers.Conv2D( + filters=self._filters, + kernel_size=1, + strides=1, + use_bias=False, + kernel_initializer=self._kernel_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer) + self._norm1 = self._norm( + axis=self._bn_axis, + momentum=self._norm_momentum, + epsilon=self._norm_epsilon) + + self._conv2 = tf.keras.layers.Conv2D( + filters=self._filters, + kernel_size=3, + strides=self._strides, + padding='same', + use_bias=False, + kernel_initializer=self._kernel_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer) + self._norm2 = self._norm( + axis=self._bn_axis, + momentum=self._norm_momentum, + epsilon=self._norm_epsilon) + + self._conv3 = tf.keras.layers.Conv2D( + filters=self._filters * 4, + kernel_size=1, + strides=1, + use_bias=False, + kernel_initializer=self._kernel_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer) + self._norm3 = self._norm( + axis=self._bn_axis, + momentum=self._norm_momentum, + epsilon=self._norm_epsilon) + + super(BottleneckBlock, self).build(input_shape) + + def get_config(self): + config = { + 'filters': self._filters, + 'strides': self._strides, + 'use_projection': self._use_projection, + 'kernel_initializer': self._kernel_initializer, + 'kernel_regularizer': self._kernel_regularizer, + 'bias_regularizer': self._bias_regularizer, + 'activation': self._activation, + 'use_sync_bn': self._use_sync_bn, + 'norm_momentum': self._norm_momentum, + 'norm_epsilon': self._norm_epsilon + } + + base_config = super(BottleneckBlock, self).get_config() + return dict(list(base_config.items()) + list(config.items())) + + def call(self, inputs): + shortcut = inputs + if self._use_projection: + shortcut = self._shortcut(shortcut) + shortcut = self._norm0(shortcut) + + x = self._conv1(inputs) + x = self._norm1(x) + x = self._activation_fn(x) + + x = self._conv2(x) + x = self._norm2(x) + x = self._activation_fn(x) + + x = self._conv3(x) + x = self._norm3(x) + + return self._activation_fn(x + shortcut) diff --git a/official/vision/detection/modeling/architecture/spinenet.py b/official/vision/detection/modeling/architecture/spinenet.py new file mode 100644 index 000000000..152ed24af --- /dev/null +++ 
b/official/vision/detection/modeling/architecture/spinenet.py @@ -0,0 +1,503 @@ +# Lint as: python3 +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Implementation of SpineNet model. + +X. Du, T-Y. Lin, P. Jin, G. Ghiasi, M. Tan, Y. Cui, Q. V. Le, X. Song +SpineNet: Learning Scale-Permuted Backbone for Recognition and Localization +https://arxiv.org/abs/1912.05027 +""" +import math + +from absl import logging +import tensorflow as tf + +from tensorflow.python.keras import backend +from official.modeling import tf_utils +from official.vision.detection.modeling.architecture import nn_blocks + +layers = tf.keras.layers + +FILTER_SIZE_MAP = { + 1: 32, + 2: 64, + 3: 128, + 4: 256, + 5: 256, + 6: 256, + 7: 256, +} + +# The fixed SpineNet architecture discovered by NAS. +# Each element represents a specification of a building block: +# (block_level, block_fn, (input_offset0, input_offset1), is_output). +SPINENET_BLOCK_SPECS = [ + (2, 'bottleneck', (0, 1), False), + (4, 'residual', (0, 1), False), + (3, 'bottleneck', (2, 3), False), + (4, 'bottleneck', (2, 4), False), + (6, 'residual', (3, 5), False), + (4, 'bottleneck', (3, 5), False), + (5, 'residual', (6, 7), False), + (7, 'residual', (6, 8), False), + (5, 'bottleneck', (8, 9), False), + (5, 'bottleneck', (8, 10), False), + (4, 'bottleneck', (5, 10), True), + (3, 'bottleneck', (4, 10), True), + (5, 'bottleneck', (7, 12), True), + (7, 'bottleneck', (5, 14), True), + (6, 'bottleneck', (12, 14), True), +] + +SCALING_MAP = { + '49S': { + 'endpoints_num_filters': 128, + 'filter_size_scale': 0.65, + 'resample_alpha': 0.5, + 'block_repeats': 1, + }, + '49': { + 'endpoints_num_filters': 256, + 'filter_size_scale': 1.0, + 'resample_alpha': 0.5, + 'block_repeats': 1, + }, + '96': { + 'endpoints_num_filters': 256, + 'filter_size_scale': 1.0, + 'resample_alpha': 0.5, + 'block_repeats': 2, + }, + '143': { + 'endpoints_num_filters': 256, + 'filter_size_scale': 1.0, + 'resample_alpha': 1.0, + 'block_repeats': 3, + }, + '190': { + 'endpoints_num_filters': 512, + 'filter_size_scale': 1.3, + 'resample_alpha': 1.0, + 'block_repeats': 4, + }, +} + + +class BlockSpec(object): + """A container class that specifies the block configuration for SpineNet.""" + + def __init__(self, level, block_fn, input_offsets, is_output): + self.level = level + self.block_fn = block_fn + self.input_offsets = input_offsets + self.is_output = is_output + + +def build_block_specs(block_specs=None): + """Builds the list of BlockSpec objects for SpineNet.""" + if not block_specs: + block_specs = SPINENET_BLOCK_SPECS + logging.info('Building SpineNet block specs: %s', block_specs) + return [BlockSpec(*b) for b in block_specs] + + +@tf.keras.utils.register_keras_serializable(package='Vision') +class SpineNet(tf.keras.Model): + """Class to build SpineNet models.""" + + def __init__(self, + input_specs=tf.keras.layers.InputSpec(shape=[None, 640, 640, 
3]), + min_level=3, + max_level=7, + block_specs=build_block_specs(), + endpoints_num_filters=256, + resample_alpha=0.5, + block_repeats=1, + filter_size_scale=1.0, + kernel_initializer='VarianceScaling', + kernel_regularizer=None, + bias_regularizer=None, + activation='relu', + use_sync_bn=False, + norm_momentum=0.99, + norm_epsilon=0.001, + **kwargs): + """SpineNet model.""" + self._min_level = min_level + self._max_level = max_level + self._block_specs = block_specs + self._endpoints_num_filters = endpoints_num_filters + self._resample_alpha = resample_alpha + self._block_repeats = block_repeats + self._filter_size_scale = filter_size_scale + self._kernel_initializer = kernel_initializer + self._kernel_regularizer = kernel_regularizer + self._bias_regularizer = bias_regularizer + self._use_sync_bn = use_sync_bn + self._norm_momentum = norm_momentum + self._norm_epsilon = norm_epsilon + if activation == 'relu': + self._activation = tf.nn.relu + elif activation == 'swish': + self._activation = tf.nn.swish + else: + raise ValueError('Activation {} not implemented.'.format(activation)) + self._init_block_fn = 'bottleneck' + self._num_init_blocks = 2 + + if use_sync_bn: + self._norm = layers.experimental.SyncBatchNormalization + else: + self._norm = layers.BatchNormalization + + if tf.keras.backend.image_data_format() == 'channels_last': + self._bn_axis = -1 + else: + self._bn_axis = 1 + + # Build SpineNet. + inputs = tf.keras.Input(shape=input_specs.shape[1:]) + + net = self._build_stem(inputs=inputs) + net = self._build_scale_permuted_network( + net=net, input_width=input_specs.shape[1]) + net = self._build_endpoints(net=net) + + super(SpineNet, self).__init__(inputs=inputs, outputs=net) + + def _block_group(self, + inputs, + filters, + strides, + block_fn_cand, + block_repeats=1, + name='block_group'): + """Creates one group of blocks for the SpineNet model.""" + block_fn_candidates = { + 'bottleneck': nn_blocks.BottleneckBlock, + 'residual': nn_blocks.ResidualBlock, + } + block_fn = block_fn_candidates[block_fn_cand] + _, _, _, num_filters = inputs.get_shape().as_list() + + if block_fn_cand == 'bottleneck': + use_projection = not (num_filters == (filters * 4) and strides == 1) + else: + use_projection = not (num_filters == filters and strides == 1) + + x = block_fn( + filters=filters, + strides=strides, + use_projection=use_projection, + kernel_initializer=self._kernel_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer, + activation=self._activation, + use_sync_bn=self._use_sync_bn, + norm_momentum=self._norm_momentum, + norm_epsilon=self._norm_epsilon)( + inputs) + for _ in range(1, block_repeats): + x = block_fn( + filters=filters, + strides=1, + use_projection=False, + kernel_initializer=self._kernel_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer, + activation=self._activation, + use_sync_bn=self._use_sync_bn, + norm_momentum=self._norm_momentum, + norm_epsilon=self._norm_epsilon)( + x) + return tf.identity(x, name=name) + + def _build_stem(self, inputs): + """Build SpineNet stem.""" + x = layers.Conv2D( + filters=64, + kernel_size=7, + strides=2, + use_bias=False, + padding='same', + kernel_initializer=self._kernel_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer)( + inputs) + x = self._norm( + axis=self._bn_axis, + momentum=self._norm_momentum, + epsilon=self._norm_epsilon)( + x) + x = 
tf_utils.get_activation(self._activation)(x) + x = layers.MaxPool2D(pool_size=3, strides=2, padding='same')(x) + + net = [] + # Build the initial level 2 blocks. + for i in range(self._num_init_blocks): + x = self._block_group( + inputs=x, + filters=int(FILTER_SIZE_MAP[2] * self._filter_size_scale), + strides=1, + block_fn_cand=self._init_block_fn, + block_repeats=self._block_repeats, + name='stem_block_{}'.format(i + 1)) + net.append(x) + return net + + def _build_scale_permuted_network(self, + net, + input_width, + weighted_fusion=False): + """Build scale-permuted network.""" + net_sizes = [int(math.ceil(input_width / 2**2))] * len(net) + net_block_fns = [self._init_block_fn] * len(net) + num_outgoing_connections = [0] * len(net) + + endpoints = {} + for i, block_spec in enumerate(self._block_specs): + # Find out specs for the target block. + target_width = int(math.ceil(input_width / 2**block_spec.level)) + target_num_filters = int(FILTER_SIZE_MAP[block_spec.level] * + self._filter_size_scale) + target_block_fn = block_spec.block_fn + + # Resample then merge input0 and input1. + parents = [] + input0 = block_spec.input_offsets[0] + input1 = block_spec.input_offsets[1] + + x0 = self._resample_with_alpha( + inputs=net[input0], + input_width=net_sizes[input0], + input_block_fn=net_block_fns[input0], + target_width=target_width, + target_num_filters=target_num_filters, + target_block_fn=target_block_fn, + alpha=self._resample_alpha) + parents.append(x0) + num_outgoing_connections[input0] += 1 + + x1 = self._resample_with_alpha( + inputs=net[input1], + input_width=net_sizes[input1], + input_block_fn=net_block_fns[input1], + target_width=target_width, + target_num_filters=target_num_filters, + target_block_fn=target_block_fn, + alpha=self._resample_alpha) + parents.append(x1) + num_outgoing_connections[input1] += 1 + + # Merge 0 outdegree blocks to the output block. + if block_spec.is_output: + for j, (j_feat, + j_connections) in enumerate(zip(net, num_outgoing_connections)): + if j_connections == 0 and (j_feat.shape[2] == target_width and + j_feat.shape[3] == x0.shape[3]): + parents.append(j_feat) + num_outgoing_connections[j] += 1 + + # pylint: disable=g-direct-tensorflow-import + if weighted_fusion: + dtype = parents[0].dtype + parent_weights = [ + tf.nn.relu(tf.cast(tf.Variable(1.0, name='block{}_fusion{}'.format( + i, j)), dtype=dtype)) for j in range(len(parents))] + weights_sum = tf.add_n(parent_weights) + parents = [ + parents[i] * parent_weights[i] / (weights_sum + 0.0001) + for i in range(len(parents)) + ] + + # Fuse all parent nodes then build a new block. + x = tf_utils.get_activation(self._activation)(tf.add_n(parents)) + x = self._block_group( + inputs=x, + filters=target_num_filters, + strides=1, + block_fn_cand=target_block_fn, + block_repeats=self._block_repeats, + name='scale_permuted_block_{}'.format(i + 1)) + + net.append(x) + net_sizes.append(target_width) + net_block_fns.append(target_block_fn) + num_outgoing_connections.append(0) + + # Save output feats. 
+ if block_spec.is_output: + if block_spec.level in endpoints: + raise ValueError('Duplicate feats found for output level {}.'.format( + block_spec.level)) + if (block_spec.level < self._min_level or + block_spec.level > self._max_level): + raise ValueError('Output level is out of range [{}, {}]'.format( + self._min_level, self._max_level)) + endpoints[block_spec.level] = x + + return endpoints + + def _build_endpoints(self, net): + """Match filter size for endpoints before sharing conv layers.""" + endpoints = {} + for level in range(self._min_level, self._max_level + 1): + x = layers.Conv2D( + filters=self._endpoints_num_filters, + kernel_size=1, + strides=1, + use_bias=False, + kernel_initializer=self._kernel_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer)( + net[level]) + x = self._norm( + axis=self._bn_axis, + momentum=self._norm_momentum, + epsilon=self._norm_epsilon)( + x) + x = tf_utils.get_activation(self._activation)(x) + endpoints[level] = x + return endpoints + + def _resample_with_alpha(self, + inputs, + input_width, + input_block_fn, + target_width, + target_num_filters, + target_block_fn, + alpha=0.5): + """Match resolution and feature dimension.""" + _, _, _, input_num_filters = inputs.get_shape().as_list() + if input_block_fn == 'bottleneck': + input_num_filters /= 4 + new_num_filters = int(input_num_filters * alpha) + + x = layers.Conv2D( + filters=new_num_filters, + kernel_size=1, + strides=1, + use_bias=False, + kernel_initializer=self._kernel_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer)( + inputs) + x = self._norm( + axis=self._bn_axis, + momentum=self._norm_momentum, + epsilon=self._norm_epsilon)( + x) + x = tf_utils.get_activation(self._activation)(x) + + # Spatial resampling. + if input_width > target_width: + x = layers.Conv2D( + filters=new_num_filters, + kernel_size=3, + strides=2, + padding='SAME', + use_bias=False, + kernel_initializer=self._kernel_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer)( + x) + x = self._norm( + axis=self._bn_axis, + momentum=self._norm_momentum, + epsilon=self._norm_epsilon)( + x) + x = tf_utils.get_activation(self._activation)(x) + input_width /= 2 + while input_width > target_width: + x = layers.MaxPool2D(pool_size=3, strides=2, padding='SAME')(x) + input_width /= 2 + elif input_width < target_width: + scale = target_width // input_width + x = layers.UpSampling2D(size=(scale, scale))(x) + + # Last 1x1 conv to match filter size. 
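+    # Bottleneck blocks emit 4x their nominal filter count (the final 1x1 conv
+    # in BottleneckBlock uses filters * 4), so the target filter count is
+    # expanded accordingly before the projection below.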
+ if target_block_fn == 'bottleneck': + target_num_filters *= 4 + x = layers.Conv2D( + filters=target_num_filters, + kernel_size=1, + strides=1, + use_bias=False, + kernel_initializer=self._kernel_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer)( + x) + x = self._norm( + axis=self._bn_axis, + momentum=self._norm_momentum, + epsilon=self._norm_epsilon)( + x) + + return x + + +class SpineNetBuilder(object): + """SpineNet builder.""" + + def __init__(self, + model_id, + min_level=3, + max_level=7, + block_specs=build_block_specs(), + kernel_initializer='VarianceScaling', + kernel_regularizer=None, + bias_regularizer=None, + activation='relu', + use_sync_bn=False, + norm_momentum=0.99, + norm_epsilon=0.001): + if model_id not in SCALING_MAP: + raise ValueError( + 'SpineNet {} is not a valid architecture.'.format(model_id)) + scaling_params = SCALING_MAP[model_id] + self._min_level = min_level + self._max_level = max_level + self._block_specs = block_specs + self._endpoints_num_filters = scaling_params['endpoints_num_filters'] + self._resample_alpha = scaling_params['resample_alpha'] + self._block_repeats = scaling_params['block_repeats'] + self._filter_size_scale = scaling_params['filter_size_scale'] + self._kernel_initializer = kernel_initializer + self._kernel_regularizer = kernel_regularizer + self._bias_regularizer = bias_regularizer + self._activation = activation + self._use_sync_bn = use_sync_bn + self._norm_momentum = norm_momentum + self._norm_epsilon = norm_epsilon + + def __call__(self, inputs, is_training=None): + with backend.get_graph().as_default(): + model = SpineNet( + min_level=self._min_level, + max_level=self._max_level, + block_specs=self._block_specs, + endpoints_num_filters=self._endpoints_num_filters, + resample_alpha=self._resample_alpha, + block_repeats=self._block_repeats, + filter_size_scale=self._filter_size_scale, + kernel_initializer=self._kernel_initializer, + kernel_regularizer=self._kernel_regularizer, + bias_regularizer=self._bias_regularizer, + activation=self._activation, + use_sync_bn=self._use_sync_bn, + norm_momentum=self._norm_momentum, + norm_epsilon=self._norm_epsilon) + return model(inputs) diff --git a/official/vision/segmentation/README.md b/official/vision/segmentation/README.md new file mode 100644 index 000000000..3ab3fb18b --- /dev/null +++ b/official/vision/segmentation/README.md @@ -0,0 +1,188 @@ +# UNet 3D Model + +This repository contains TensorFlow 2.x implementation for 3D Unet model +[[1]](#1) as well as instructions for producing the data for training and +evaluation. + +Furthermore, this implementation also includes use of spatial partitioning +[[2]](#2) for TPU's to leverage high resolution images for training. + +## Contents + * [Contents](#contents) + * [Prerequsites](#prerequsites) + * [Setup](#setup) + * [Data Preparation](#data-preparation) + * [Training](#data-preparation) + * [Train with Spatial Partition](#train-with-spatial-partition) + * [Evaluation](#evaluation) + * [References](#references) + +## Prerequsites + +To use high resolution image data, spatial partition should be used to avoid +prevent out of memory issues. This is currently only supported with TPU's. To +use TPU's for training, in Google Cloud console, please run the following +command to create cloud TPU VM. + +```shell +ctpu up -name=[tpu_name] -tf-version=nightly -tpu-size=v3-8 -zone=us-central1-b +``` + +## Setup + +Before running any binary, please install necessary packages on cloud VM. 
+
+```shell
+pip install -r requirements.txt
+```
+
+## Data Preparation
+
+This software uses TFRecords as input. We provide example scripts to convert
+Numpy (.npy) files or NIfTI-1 (.nii) files to TFRecords, using the Liver Tumor
+Segmentation (LiTS) dataset (Christ et al.
+https://competitions.codalab.org/competitions/17094). You can download the
+dataset by registering on the competition website.
+
+**Example**:
+
+```shell
+cd data_preprocess
+
+# Change input_path and output_path in convert_lits_nii_to_npy.py
+# Then run the script to convert nii to npy.
+python convert_lits_nii_to_npy.py
+
+# Convert npy files to TFRecords.
+python convert_lits.py \
+  --image_file_pattern=Downloads/.../volume-{}.npy \
+  --label_file_pattern=Downloads/.../segmentation-{}.npy \
+  --output_path=Downloads/...
+```
+
+## Training
+
+Working configs on TPU v3-8:
+
++   TF 2.2, train_batch_size=16, use_batch_norm=true, dtype='bfloat16' or
+    'float16', spatial partition not used.
++   tf-nightly, train_batch_size=32, use_batch_norm=true, dtype='bfloat16',
+    spatial partition used.
+
+The following example shows how to train a volumetric UNet on TPU v3-8. The
+loss is *adaptive_dice32*. The training batch size is 32. For the detailed
+config, refer to `unet_config.py` and the example config file shown below.
+
+**Example**:
+
+```shell
+DATA_BUCKET=
+TRAIN_FILES="${DATA_BUCKET}/tfrecords/trainbox*.tfrecord"
+VAL_FILES="${DATA_BUCKET}/tfrecords/validationbox*.tfrecord"
+MODEL_BUCKET=
+EXP_NAME=unet_20190610_dice_t1
+
+python unet_main.py \
+--distribution_strategy=<"mirrored" or "tpu"> \
+--num_gpus=<'number of GPUs to use if using mirrored strategy'> \
+--tpu= \
+--model_dir="gs://${MODEL_BUCKET}/models/${EXP_NAME}" \
+--training_file_pattern="${TRAIN_FILES}" \
+--eval_file_pattern="${VAL_FILES}" \
+--steps_per_loop=10 \
+--mode=train \
+--config_file="./configs/cloud/v3-8_128x128x128_ce.yaml"
+```
+
+The following script example is for running evaluation on TPU v3-8.
+Configurations such as `train_batch_size`, `train_steps`, `eval_batch_size` and
+`eval_item_count` are defined in the configuration file passed via the
+`config_file` flag. It is only a one-line change from the previous script:
+change the `mode` flag to "eval".
+
+### Train with Spatial Partition
+
+The following example specifies spatial partitioning with
+`input_partition_dims` in the config file. For example, setting
+`input_partition_dims: [1, 16, 1, 1, 1]` in the config file will split
+the image 16 ways along the first (width) dimension. The first dimension
+(set to 1) is the batch dimension.
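+
+Before the full example, a rough way to reason about what these partition dims
+do to each core's input: divide each dimension of the
+`[batch, x, y, z, channels]` input tensor by the corresponding partition
+factor. The helper below is only an illustrative sketch (it is not part of
+this repository; the actual sharding is performed by the TPU strategy).
+
+```python
+def per_core_shape(batch, image_size, channels, partition_dims):
+  """Shape of the slice each core sees under spatial partitioning.
+
+  partition_dims is ordered [batch, x, y, z, channels]; every dimension is
+  assumed to be divisible by its partition factor.
+  """
+  full_shape = [batch] + list(image_size) + [channels]
+  assert len(full_shape) == len(partition_dims)
+  return [dim // parts for dim, parts in zip(full_shape, partition_dims)]
+
+# With input_image_size [256, 256, 256], batch size 8 and input_partition_dims
+# [1, 16, 1, 1, 1], each core processes a [8, 16, 256, 256, 1] slice rather
+# than the full [8, 256, 256, 256, 1] tensor.
+print(per_core_shape(8, [256, 256, 256], 1, [1, 16, 1, 1, 1]))
+```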
+ +**Example: Train with 16-way spatial partition**: + +```shell +DATA_BUCKET= +TRAIN_FILES="${DATA_BUCKET}/tfrecords/trainbox*.tfrecord" +VAL_FILES="${DATA_BUCKET}/tfrecords/validationbox*.tfrecord" +MODEL_BUCKET= +EXP_NAME=unet_20190610_dice_t1 + +python unet_main.py \ +--distribution_strategy=<"mirrored" or "tpu"> +--num_gpus=<'number of GPUs to use if using mirrored strategy'> +--tpu= \ +--model_dir="gs://${MODEL_BUCKET}/models/${EXP_NAME}" \ +--training_file_pattern="${TRAIN_FILES}" \ +--eval_file_pattern="${VAL_FILES}" \ +--steps_per_loop=10 \ +--mode=train \ +--config_file="./configs/cloud/v3-8_128x128x128_ce.yaml" +``` + +**Example: Example config file with 16-way spatial partition**: + +``` +train_steps: 3000 +loss: 'adaptive_dice32' +train_batch_size: 8 +eval_batch_size: 8 +use_index_label_in_train: false + +input_partition_dims: [1,16,1,1,1] +input_image_size: [256,256,256] + +dtype: 'bfloat16' +label_dtype: 'float32' + +train_item_count: 5400 +eval_item_count: 1674 +``` + +## Evaluation + +```shell +DATA_BUCKET= +TRAIN_FILES="${DATA_BUCKET}/tfrecords/trainbox*.tfrecord" +VAL_FILES="${DATA_BUCKET}/tfrecords/validationbox*.tfrecord" +MODEL_BUCKET= +EXP_NAME=unet_20190610_dice_t1 + +python unet_main.py \ +--distribution_strategy=<"mirrored" or "tpu"> +--num_gpus=<'number of GPUs to use if using mirrored strategy'> +--tpu= \ +--model_dir="gs://${MODEL_BUCKET}/models/${EXP_NAME}" \ +--training_file_pattern="${TRAIN_FILES}" \ +--eval_file_pattern="${VAL_FILES}" \ +--steps_per_loop=10 \ +--mode="eval" \ +--config_file="./configs/cloud/v3-8_128x128x128_ce.yaml" +``` + +## License + +[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) + +This project is licensed under the terms of the **Apache License 2.0**. + +## References + +[1] Özgün Çiçek, Ahmed Abdulkadir, Soeren S. Lienkamp, +Thomas Brox, Olaf Ronneberger "3D U-Net: Learning Dense Volumetric Segmentation +from Sparse Annotation": https://arxiv.org/abs/1606.06650. (MICCAI 2016). + +[2] Le Hou, Youlong Cheng, Noam Shazeer, Niki Parmar, Yeqing Li, +Panagiotis Korfiatis, Travis M. Drucker, Daniel J. Blezek, Xiaodan Song "High +Resolution Medical Image Analysis with Spatial Partitioning": +https://arxiv.org/abs/1810.04805. + diff --git a/official/vision/segmentation/__init__.py b/official/vision/segmentation/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/official/vision/segmentation/convert_lits.py b/official/vision/segmentation/convert_lits.py new file mode 100644 index 000000000..e80e60e1d --- /dev/null +++ b/official/vision/segmentation/convert_lits.py @@ -0,0 +1,269 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +r"""Converts raw LiTS numpy data to TFRecord. 
+ +The file is forked from: +https://github.com/tensorflow/tpu/blob/master/models/official/unet3d/data_preprocess/convert_lits.py +""" + +from __future__ import absolute_import +from __future__ import division +# from __future__ import google_type_annotations +from __future__ import print_function + +import os + +from absl import app +from absl import flags +from absl import logging +import numpy as np +from PIL import Image +from scipy import ndimage +import tensorflow.google.compat.v1 as tf + +flags.DEFINE_string("image_file_pattern", None, + "path pattern to an input image npy file.") +flags.DEFINE_string("label_file_pattern", None, + "path pattern to an input label npy file.") +flags.DEFINE_string("output_path", None, "path to output TFRecords.") + +flags.DEFINE_boolean("crop_liver_region", True, + "whether to crop liver region out.") +flags.DEFINE_boolean("apply_data_aug", False, + "whether to apply data augmentation.") + +flags.DEFINE_integer("shard_start", 0, + "start with volume-${shard_start}.npy.") +flags.DEFINE_integer("shard_stride", 1, + "this process will convert " + "volume-${shard_start + n * shard_stride}.npy for all n.") + +flags.DEFINE_integer("output_size", 128, + "output, cropped size along x, y, and z.") +flags.DEFINE_integer("resize_size", 192, + "size along x, y, and z before cropping.") + +FLAGS = flags.FLAGS + + +def to_1hot(label): + per_class = [] + for classes in range(3): + per_class.append((label == classes)[..., np.newaxis]) + label = np.concatenate(per_class, axis=-1).astype(label.dtype) + return label + + +def save_to_tfrecord(image, label, idx, im_id, output_path, + convert_label_to_1hot): + """Save to TFRecord.""" + if convert_label_to_1hot: + label = to_1hot(label) + + d_feature = {} + d_feature["image/ct_image"] = tf.train.Feature( + bytes_list=tf.train.BytesList(value=[image.reshape([-1]).tobytes()])) + d_feature["image/label"] = tf.train.Feature( + bytes_list=tf.train.BytesList(value=[label.reshape([-1]).tobytes()])) + + example = tf.train.Example(features=tf.train.Features(feature=d_feature)) + serialized = example.SerializeToString() + + result_file = os.path.join( + output_path, "instance-{}-{}.tfrecords".format(im_id, idx)) + options = tf.python_io.TFRecordOptions( + tf.python_io.TFRecordCompressionType.GZIP) + with tf.python_io.TFRecordWriter(result_file, options=options) as w: + w.write(serialized) + + +def intensity_change(im): + """Color augmentation.""" + if np.random.rand() < 0.1: + return im + # Randomly scale color. + sigma = 0.05 + truncate_rad = 0.1 + im *= np.clip(np.random.normal(1.0, sigma), + 1.0 - truncate_rad, 1.0 + truncate_rad) + return im + + +def rand_crop_liver(image, label, res_s, out_s, + apply_data_aug, augment_times=54): + """Crop image and label; Randomly change image intensity. + + Randomly crop image and label around liver. + + Args: + image: 3D numpy array. + label: 3D numpy array. + res_s: resized size of image and label. + out_s: output size of random crops. + apply_data_aug: whether to apply data augmentation. + augment_times: the number of times to randomly crop and augment data. + Yields: + croped and augmented image and label. + """ + if image.shape != (res_s, res_s, res_s) or \ + label.shape != (res_s, res_s, res_s): + logging.info("Unexpected shapes. 
" + "image.shape: %s, label.shape: %s", + image.shape, label.shape) + return + + rough_liver_label = 1 + x, y, z = np.where(label == rough_liver_label) + bbox_center = [(x.min() + x.max()) // 2, + (y.min() + y.max()) // 2, + (z.min() + z.max()) // 2] + + def in_range_check(c): + c = max(c, out_s // 2) + c = min(c, res_s - out_s // 2) + return c + + for _ in range(augment_times): + rand_c = [] + for c in bbox_center: + sigma = out_s // 6 + truncate_rad = out_s // 4 + c += np.clip(np.random.randn() * sigma, -truncate_rad, truncate_rad) + rand_c.append(int(in_range_check(c))) + + image_aug = image[rand_c[0] - out_s // 2:rand_c[0] + out_s // 2, + rand_c[1] - out_s // 2:rand_c[1] + out_s // 2, + rand_c[2] - out_s // 2:rand_c[2] + out_s // 2].copy() + label_aug = label[rand_c[0] - out_s // 2:rand_c[0] + out_s // 2, + rand_c[1] - out_s // 2:rand_c[1] + out_s // 2, + rand_c[2] - out_s // 2:rand_c[2] + out_s // 2].copy() + + if apply_data_aug: + image_aug = intensity_change(image_aug) + + yield image_aug, label_aug + + +def rand_crop_whole_ct(image, label, res_s, out_s, + apply_data_aug, augment_times=2): + """Crop image and label; Randomly change image intensity. + + Randomly crop image and label. + + Args: + image: 3D numpy array. + label: 3D numpy array. + res_s: resized size of image and label. + out_s: output size of random crops. + apply_data_aug: whether to apply data augmentation. + augment_times: the number of times to randomly crop and augment data. + Yields: + croped and augmented image and label. + """ + if image.shape != (res_s, res_s, res_s) or \ + label.shape != (res_s, res_s, res_s): + logging.info("Unexpected shapes. " + "image.shape: %s, label.shape: %s", + image.shape, label.shape) + return + + if not apply_data_aug: + # Do not augment data. + idx = (res_s - out_s) // 2 + image = image[idx:idx + out_s, idx:idx + out_s, idx:idx + out_s] + label = label[idx:idx + out_s, idx:idx + out_s, idx:idx + out_s] + yield image, label + else: + cut = res_s - out_s + for _ in range(augment_times): + for i in [0, cut // 2, cut]: + for j in [0, cut // 2, cut]: + for k in [0, cut // 2, cut]: + image_aug = image[i:i + out_s, j:j + out_s, k:k + out_s].copy() + label_aug = label[i:i + out_s, j:j + out_s, k:k + out_s].copy() + image_aug = intensity_change(image_aug) + yield image_aug, label_aug + + +def resize_3d_image_nearest_interpolation(im, res_s): + """Resize 3D image, but with nearest interpolation.""" + new_shape = [res_s, im.shape[1], im.shape[2]] + ret0 = np.zeros(new_shape, dtype=im.dtype) + for i in range(im.shape[2]): + im_slice = np.array(Image.fromarray(im[..., i]).resize( + (im.shape[1], res_s), resample=Image.NEAREST)) + ret0[..., i] = im_slice + + new_shape = [res_s, res_s, res_s] + ret = np.zeros(new_shape, dtype=im.dtype) + for i in range(res_s): + im_slice = np.array(Image.fromarray(ret0[i, ...]).resize( + (res_s, res_s), resample=Image.NEAREST)) + ret[i, ...] 
= im_slice + return ret + + +def process_one_file(image_path, label_path, im_id, + output_path, res_s, out_s, + crop_liver_region, apply_data_aug): + """Convert one npy file.""" + with tf.gfile.Open(image_path, "rb") as f: + image = np.load(f) + with tf.gfile.Open(label_path, "rb") as f: + label = np.load(f) + + image = ndimage.zoom(image, [float(res_s) / image.shape[0], + float(res_s) / image.shape[1], + float(res_s) / image.shape[2]]) + label = resize_3d_image_nearest_interpolation(label.astype(np.uint8), + res_s).astype(np.float32) + + if crop_liver_region: + for idx, (image_aug, label_aug) in enumerate(rand_crop_liver( + image, label, res_s, out_s, apply_data_aug)): + save_to_tfrecord(image_aug, label_aug, idx, im_id, output_path, + convert_label_to_1hot=True) + else: # not crop_liver_region + # If we output the entire CT scan (crop_liver_region=False), + # do not convert_label_to_1hot to save storage. + for idx, (image_aug, label_aug) in enumerate(rand_crop_whole_ct( + image, label, res_s, out_s, apply_data_aug)): + save_to_tfrecord(image_aug, label_aug, idx, im_id, output_path, + convert_label_to_1hot=False) + + +def main(argv): + del argv + + output_path = FLAGS.output_path + res_s = FLAGS.resize_size + out_s = FLAGS.output_size + crop_liver_region = FLAGS.crop_liver_region + apply_data_aug = FLAGS.apply_data_aug + + for im_id in range(FLAGS.shard_start, 1000000, FLAGS.shard_stride): + image_path = FLAGS.image_file_pattern.format(im_id) + label_path = FLAGS.label_file_pattern.format(im_id) + if not tf.gfile.Exists(image_path): + logging.info("Reached the end. Image does not exist: %s. " + "Process finish.", image_path) + break + process_one_file(image_path, label_path, im_id, + output_path, res_s, out_s, + crop_liver_region, apply_data_aug) + + +if __name__ == "__main__": + app.run(main) diff --git a/official/vision/segmentation/convert_lits_nii_to_npy.py b/official/vision/segmentation/convert_lits_nii_to_npy.py new file mode 100644 index 000000000..12fc2d2d0 --- /dev/null +++ b/official/vision/segmentation/convert_lits_nii_to_npy.py @@ -0,0 +1,58 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +r"""Converts .nii files in LiTS dataset to .npy files. + +This script should be run just once before running convert_lits.py. + +The file is forked from: +https://github.com/tensorflow/tpu/blob/master/models/official/unet3d/data_preprocess/convert_lits_nii_to_npy.py +""" + +from __future__ import absolute_import +from __future__ import division +# from __future__ import google_type_annotations +from __future__ import print_function + +import glob +import multiprocessing +import os + +import nibabel as nib +import numpy as np + + +num_processes = 2 +input_path = "Downloads/LiTS/Train/" # where the .nii files are. +output_path = "Downloads/LiTS/Train_np/" # where you want to put the npy files. 
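+# The converter assumes the standard LiTS naming convention: image volumes
+# named volume-<id>.nii with matching labels segmentation-<id>.nii under
+# input_path; each pair is written to output_path as volume-<id>.npy and
+# segmentation-<id>.npy (see process_one_file below).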
+ + +def process_one_file(image_path): + """Convert one nii file to npy.""" + im_id = os.path.basename(image_path).split("volume-")[1].split(".nii")[0] + label_path = image_path.replace("volume-", "segmentation-") + + image = nib.load(image_path).get_data().astype(np.float32) + label = nib.load(label_path).get_data().astype(np.float32) + + print("image shape: {}, dtype: {}".format(image.shape, image.dtype)) + print("label shape: {}, dtype: {}".format(label.shape, label.dtype)) + + np.save(os.path.join(output_path, "volume-{}.npy".format(im_id)), image) + np.save(os.path.join(output_path, "segmentation-{}.npy".format(im_id)), label) + + +nii_dir = os.path.join(input_path, "volume-*") +p = multiprocessing.Pool(num_processes) +p.map(process_one_file, glob.glob(nii_dir)) diff --git a/official/vision/segmentation/unet_config.py b/official/vision/segmentation/unet_config.py new file mode 100644 index 000000000..bfaf83adc --- /dev/null +++ b/official/vision/segmentation/unet_config.py @@ -0,0 +1,76 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Config to train UNet.""" + +from __future__ import absolute_import +from __future__ import division +# from __future__ import google_type_annotations +from __future__ import print_function + +UNET_CONFIG = { + # Place holder for tpu configs. + 'tpu_config': {}, + 'model_dir': '', + 'training_file_pattern': None, + 'eval_file_pattern': None, + # The input files are GZip compressed and need decompression. + 'compressed_input': True, + 'dtype': 'bfloat16', + 'label_dtype': 'float32', + 'train_batch_size': 8, + 'eval_batch_size': 8, + 'predict_batch_size': 8, + 'train_epochs': 20, + 'train_steps': 1000, + 'eval_steps': 10, + 'num_steps_per_eval': 100, + 'min_eval_interval': 180, + 'eval_timeout': None, + 'optimizer': 'adam', + 'momentum': 0.9, + # Spatial dimension of input image. + 'input_image_size': [128, 128, 128], + # Number of channels of the input image. + 'num_channels': 1, + # Spatial partition dimensions. + 'input_partition_dims': None, + # Use deconvolution to upsample, otherwise upsampling. + 'deconvolution': True, + # Number of areas i need to segment + 'num_classes': 3, + # Number of filters used by the architecture + 'num_base_filters': 32, + # Depth of the network + 'depth': 4, + # Dropout values to use across the network + 'dropout_rate': 0.5, + # Number of levels that contribute to the output. + 'num_segmentation_levels': 2, + # Use batch norm. + 'use_batch_norm': True, + 'init_learning_rate': 0.1, + # learning rate decay steps. + 'lr_decay_steps': 100, + # learning rate decay rate. + 'lr_decay_rate': 0.5, + # Data format, 'channels_last' and 'channels_first' + 'data_format': 'channels_last', + # Use class index for training. Otherwise, use one-hot encoding. + 'use_index_label_in_train': False, + # e.g. 
softmax cross entropy, adaptive_dice32 + 'loss': 'adaptive_dice32', +} + +UNET_RESTRICTIONS = [] diff --git a/official/vision/segmentation/unet_data.py b/official/vision/segmentation/unet_data.py new file mode 100644 index 000000000..b3d6ed29a --- /dev/null +++ b/official/vision/segmentation/unet_data.py @@ -0,0 +1,175 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +r"""Defines input_fn of TF2 UNet-3D model.""" + +from __future__ import absolute_import +from __future__ import division +# from __future__ import google_type_annotations +from __future__ import print_function + +import functools + +import tensorflow as tf + + +class BaseInput(object): + """Input function for 3D Unet model.""" + + def __init__(self, file_pattern, params, is_training): + self._params = params + self._file_pattern = file_pattern + self._is_training = is_training + self._parser_fn = self.create_parser_fn(params) + if params.compressed_input: + self._dataset_fn = functools.partial( + tf.data.TFRecordDataset, compression_type='GZIP') + else: + self._dataset_fn = tf.data.TFRecordDataset + + def create_parser_fn(self, params): + """Create parse fn to extract tensors from tf.Example.""" + + def _parser(serialized_example): + """Parses a single tf.Example into image and label tensors.""" + features = tf.io.parse_example( + serialized=[serialized_example], + features={ + 'image/encoded': tf.io.VarLenFeature(dtype=tf.float32), + 'image/segmentation/mask': tf.io.VarLenFeature(dtype=tf.float32), + }) + + image = features['image/encoded'] + if isinstance(image, tf.SparseTensor): + image = tf.sparse.to_dense(image) + + gt_mask = features['image/segmentation/mask'] + if isinstance(gt_mask, tf.SparseTensor): + gt_mask = tf.sparse.to_dense(gt_mask) + + image_size, label_size = self.get_input_shapes(params) + image = tf.reshape(image, image_size) + gt_mask = tf.reshape(gt_mask, label_size) + + image = tf.cast(image, dtype=params.dtype) + gt_mask = tf.cast(gt_mask, dtype=params.dtype) + return image, gt_mask + + return _parser + + def get_input_shapes(self, params): + image_size = params.input_image_size + [params.num_channels] + label_size = params.input_image_size + [params.num_classes] + return image_size, label_size + + def __call__(self, input_pipeline_context=None): + """Generates features and labels for training or evaluation. + + This uses the input pipeline based approach using file name queue + to read data so that entire data is not loaded in memory. + + Args: + input_pipeline_context: Context used by distribution strategy to + shard dataset across workers. + + Returns: + tf.data.Dataset + """ + params = self._params + batch_size = ( + params.train_batch_size + if self._is_training else params.eval_batch_size) + + dataset = tf.data.Dataset.list_files( + self._file_pattern, shuffle=self._is_training) + + # Shard dataset when there are more than 1 workers in training. 
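+    # The global batch size is converted to a per-replica batch size, and each
+    # input pipeline then reads a disjoint shard of the listed files.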
+ if input_pipeline_context: + batch_size = input_pipeline_context.get_per_replica_batch_size(batch_size) + + if input_pipeline_context.num_input_pipelines > 1: + dataset = dataset.shard(input_pipeline_context.num_input_pipelines, + input_pipeline_context.input_pipeline_id) + + if self._is_training: + dataset = dataset.repeat() + + dataset = dataset.apply( + tf.data.experimental.parallel_interleave( + lambda file_name: self._dataset_fn(file_name).prefetch(1), + cycle_length=32, + sloppy=self._is_training)) + + if self._is_training: + dataset = dataset.shuffle(64) + + # Parses the fetched records to input tensors for model function. + dataset = dataset.map(self._parser_fn, tf.data.experimental.AUTOTUNE) + dataset = dataset.batch(batch_size, drop_remainder=True) + dataset = dataset.prefetch(tf.data.experimental.AUTOTUNE) + return dataset + + +class LiverInput(BaseInput): + """Input function of Liver Segmentation data set.""" + + def create_parser_fn(self, params): + """Create parse fn to extract tensors from tf.Example.""" + + def _decode_liver_example(serialized_example): + """Parses a single tf.Example into image and label tensors.""" + features = {} + + features['image/ct_image'] = tf.io.FixedLenFeature([], tf.string) + features['image/label'] = tf.io.FixedLenFeature([], tf.string) + + parsed = tf.io.parse_single_example( + serialized=serialized_example, features=features) + + # Here, assumes the `image` is normalized to [0, 1] of type float32 and + # the `label` is a binary matrix, whose last dimension is one_hot encoded + # labels. + # The dtype of `label` can be either float32 or int64. + image = tf.io.decode_raw(parsed['image/ct_image'], + tf.as_dtype(tf.float32)) + label = tf.io.decode_raw(parsed['image/label'], + tf.as_dtype(params.label_dtype)) + + image_size = params.input_image_size + [params.num_channels] + image = tf.reshape(image, image_size) + label_size = params.input_image_size + [params.num_classes] + label = tf.reshape(label, label_size) + + if self._is_training and params.use_index_label_in_train: + # Use class index for labels and remove the channel dim (#channels=1). + channel_dim = -1 + label = tf.argmax(input=label, axis=channel_dim, output_type=tf.int32) + + image = tf.cast(image, dtype=params.dtype) + label = tf.cast(label, dtype=params.dtype) + + # TPU doesn't support tf.int64 well, use tf.int32 directly. + if label.dtype == tf.int64: + label = tf.cast(label, dtype=tf.int32) + return image, label + + return _decode_liver_example + + def get_input_shapes(self, params): + image_size = params.input_image_size + [params.num_channels] + if self._is_training and params.use_index_label_in_train: + label_size = params.input_image_size + else: + label_size = params.input_image_size + [params.num_classes] + return image_size, label_size diff --git a/official/vision/segmentation/unet_main.py b/official/vision/segmentation/unet_main.py new file mode 100644 index 000000000..4ba95b1aa --- /dev/null +++ b/official/vision/segmentation/unet_main.py @@ -0,0 +1,350 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +r"""Training script for UNet-3D.""" + +from __future__ import absolute_import +from __future__ import division +# from __future__ import google_type_annotations +from __future__ import print_function + +import functools +import os + +from absl import app +from absl import flags +import numpy as np +import tensorflow as tf + +from official.modeling.hyperparams import params_dict +from official.utils import hyperparams_flags +from official.utils.misc import distribution_utils +from official.utils.misc import keras_utils +from official.vision.segmentation import unet_config +from official.vision.segmentation import unet_data +from official.vision.segmentation import unet_metrics +from official.vision.segmentation import unet_model as unet_model_lib + + +def define_unet3d_flags(): + """Defines flags for training 3D Unet.""" + hyperparams_flags.initialize_common_flags() + + flags.DEFINE_enum( + 'distribution_strategy', 'tpu', ['tpu', 'mirrored'], + 'Distribution Strategy type to use for training. `tpu` uses TPUStrategy ' + 'for running on TPUs, `mirrored` uses GPUs with single host.') + flags.DEFINE_integer( + 'steps_per_loop', 50, + 'Number of steps to execute in a loop for performance optimization.') + flags.DEFINE_integer('checkpoint_interval', 100, + 'Minimum step interval between two checkpoints.') + flags.DEFINE_integer('epochs', 10, 'Number of epochs to run training.') + flags.DEFINE_string( + 'gcp_project', + default=None, + help='Project name for the Cloud TPU-enabled project. If not specified, we ' + 'will attempt to automatically detect the GCE project from metadata.') + flags.DEFINE_string( + 'eval_checkpoint_dir', + default=None, + help='Directory for reading checkpoint file when `mode` == `eval`.') + flags.DEFINE_multi_integer( + 'input_partition_dims', [1], + 'A list that describes the partition dims for all the tensors.') + flags.DEFINE_string( + 'mode', 'train', 'Mode to run: train or eval or train_and_eval ' + '(default: train)') + flags.DEFINE_string('training_file_pattern', None, + 'Location of the train data.') + flags.DEFINE_string('eval_file_pattern', None, 'Location of ther eval data') + flags.DEFINE_float('lr_init_value', 0.0001, 'Initial learning rate.') + flags.DEFINE_float('lr_decay_rate', 0.9, 'Learning rate decay rate.') + flags.DEFINE_integer('lr_decay_steps', 100, 'Learning rate decay steps.') + + +def save_params(params): + """Save parameters to config files if model_dir is defined.""" + model_dir = params.model_dir + assert model_dir is not None + if not tf.io.gfile.exists(model_dir): + tf.io.gfile.makedirs(model_dir) + file_name = os.path.join(model_dir, 'params.yaml') + params_dict.save_params_dict_to_yaml(params, file_name) + + +def extract_params(flags_obj): + """Extract configuration parameters for training and evaluation.""" + params = params_dict.ParamsDict(unet_config.UNET_CONFIG, + unet_config.UNET_RESTRICTIONS) + + params = params_dict.override_params_dict( + params, flags_obj.config_file, is_strict=False) + + if flags_obj.training_file_pattern: + params.override({'training_file_pattern': flags_obj.training_file_pattern}, + is_strict=True) + if flags_obj.eval_file_pattern: + params.override({'eval_file_pattern': flags_obj.eval_file_pattern}, + is_strict=True) + + train_epoch_steps = params.train_item_count // params.train_batch_size + eval_epoch_steps = 
params.eval_item_count // params.eval_batch_size + + params.override( + { + 'model_dir': flags_obj.model_dir, + 'eval_checkpoint_dir': flags_obj.eval_checkpoint_dir, + 'mode': flags_obj.mode, + 'distribution_strategy': flags_obj.distribution_strategy, + 'tpu': flags_obj.tpu, + 'num_gpus': flags_obj.num_gpus, + 'init_learning_rate': flags_obj.lr_init_value, + 'lr_decay_rate': flags_obj.lr_decay_rate, + 'lr_decay_steps': train_epoch_steps, + 'train_epoch_steps': train_epoch_steps, + 'eval_epoch_steps': eval_epoch_steps, + 'steps_per_loop': flags_obj.steps_per_loop, + 'epochs': flags_obj.epochs, + 'checkpoint_interval': flags_obj.checkpoint_interval, + }, + is_strict=False) + + params.validate() + params.lock() + return params + + +def unet3d_callbacks(params, checkpoint_manager=None): + """Custom callbacks during training.""" + tensorboard_callback = tf.keras.callbacks.TensorBoard( + log_dir=params.model_dir) + + if checkpoint_manager: + checkpoint_callback = keras_utils.SimpleCheckpoint(checkpoint_manager) + return [tensorboard_callback, checkpoint_callback] + else: + return [tensorboard_callback] + + +def get_computation_shape_for_model_parallelism(input_partition_dims): + """Return computation shape to be used for TPUStrategy spatial partition.""" + num_logical_devices = np.prod(input_partition_dims) + if num_logical_devices == 1: + return [1, 1, 1, 1] + if num_logical_devices == 2: + return [1, 1, 1, 2] + if num_logical_devices == 4: + return [1, 2, 1, 2] + if num_logical_devices == 8: + return [2, 2, 1, 2] + if num_logical_devices == 16: + return [4, 2, 1, 2] + + raise ValueError('Unsupported number of spatial partition configuration.') + + +def create_distribution_strategy(params): + """Creates distribution strategy to use for computation.""" + + if params.input_partition_dims is not None: + if params.distribution_strategy != 'tpu': + raise ValueError('Spatial partitioning is only supported ' + 'for TPUStrategy.') + + # When `input_partition_dims` is specified create custom TPUStrategy + # instance with computation shape for model parallelism. 
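+    # Each replica spans np.prod(input_partition_dims) TPU cores, so
+    # num_replicas = total cores // np.prod(input_partition_dims); e.g. a v3-8
+    # (8 cores) with input_partition_dims [1, 2, 1, 1, 1] yields 4 replicas,
+    # each split across 2 logical devices.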
+ resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu=params.tpu) + if params.tpu not in ('', 'local'): + tf.config.experimental_connect_to_cluster(resolver) + + topology = tf.tpu.experimental.initialize_tpu_system(resolver) + num_replicas = resolver.get_tpu_system_metadata().num_cores // np.prod( + params.input_partition_dims) + device_assignment = tf.tpu.experimental.DeviceAssignment.build( + topology, + num_replicas=num_replicas, + computation_shape=get_computation_shape_for_model_parallelism( + params.input_partition_dims)) + return tf.distribute.experimental.TPUStrategy( + resolver, device_assignment=device_assignment) + + return distribution_utils.get_distribution_strategy( + distribution_strategy=params.distribution_strategy, + tpu_address=params.tpu, + num_gpus=params.num_gpus) + + +def get_train_dataset(params, ctx=None): + """Returns training dataset.""" + return unet_data.LiverInput( + params.training_file_pattern, params, is_training=True)( + ctx) + + +def get_eval_dataset(params, ctx=None): + """Returns evaluation dataset.""" + return unet_data.LiverInput( + params.training_file_pattern, params, is_training=False)( + ctx) + + +def expand_1d(data): + """Expands 1-dimensional `Tensor`s into 2-dimensional `Tensor`s.""" + + def _expand_single_1d_tensor(t): + if (isinstance(t, tf.Tensor) and isinstance(t.shape, tf.TensorShape) and + t.shape.rank == 1): + return tf.expand_dims(t, axis=-1) + return t + + return tf.nest.map_structure(_expand_single_1d_tensor, data) + + +def train_step(train_fn, input_partition_dims, data): + """The logic for one training step with spatial partitioning.""" + # Keras expects rank 2 inputs. As so, expand single rank inputs. + data = expand_1d(data) + x, y, sample_weight = tf.keras.utils.unpack_x_y_sample_weight(data) + + if input_partition_dims: + strategy = tf.distribute.get_strategy() + x = strategy.experimental_split_to_logical_devices(x, input_partition_dims) + y = strategy.experimental_split_to_logical_devices(y, input_partition_dims) + + partitioned_data = tf.keras.utils.pack_x_y_sample_weight(x, y, sample_weight) + return train_fn(partitioned_data) + + +def test_step(test_fn, input_partition_dims, data): + """The logic for one testing step with spatial partitioning.""" + # Keras expects rank 2 inputs. As so, expand single rank inputs. + data = expand_1d(data) + x, y, sample_weight = tf.keras.utils.unpack_x_y_sample_weight(data) + + if input_partition_dims: + strategy = tf.distribute.get_strategy() + x = strategy.experimental_split_to_logical_devices(x, input_partition_dims) + y = strategy.experimental_split_to_logical_devices(y, input_partition_dims) + + partitioned_data = tf.keras.utils.pack_x_y_sample_weight(x, y, sample_weight) + return test_fn(partitioned_data) + + +def train(params, strategy, unet_model, train_input_fn, eval_input_fn): + """Trains 3D Unet model.""" + assert tf.distribute.has_strategy() + + # Override Keras Model's train_step() and test_step() function so + # that inputs are spatially partitioned. + # Note that is `predict()` API is used, then `predict_step()` should also + # be overriden. 
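+  # `functools.partial` wraps the original bound Keras train_step/test_step;
+  # the wrappers defined in this file split each batch across logical devices
+  # with experimental_split_to_logical_devices before delegating to Keras.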
+ unet_model.train_step = functools.partial(train_step, unet_model.train_step, + params.input_partition_dims) + unet_model.test_step = functools.partial(test_step, unet_model.test_step, + params.input_partition_dims) + + optimizer = unet_model_lib.create_optimizer(params.init_learning_rate, params) + loss_fn = unet_metrics.get_loss_fn(params.mode, params) + unet_model.compile( + loss=loss_fn, + optimizer=optimizer, + metrics=[unet_metrics.metric_accuracy], + experimental_steps_per_execution=params.steps_per_loop) + + train_ds = strategy.experimental_distribute_datasets_from_function( + train_input_fn) + eval_ds = strategy.experimental_distribute_datasets_from_function( + eval_input_fn) + + checkpoint = tf.train.Checkpoint(model=unet_model) + + train_epoch_steps = params.train_item_count // params.train_batch_size + eval_epoch_steps = params.eval_item_count // params.eval_batch_size + + checkpoint_manager = tf.train.CheckpointManager( + checkpoint, + directory=params.model_dir, + max_to_keep=10, + step_counter=unet_model.optimizer.iterations, + checkpoint_interval=params.checkpoint_interval) + checkpoint_manager.restore_or_initialize() + + train_result = unet_model.fit( + x=train_ds, + epochs=params.epochs, + steps_per_epoch=train_epoch_steps, + validation_data=eval_ds, + validation_steps=eval_epoch_steps, + callbacks=unet3d_callbacks(params, checkpoint_manager)) + return train_result + + +def evaluate(params, strategy, unet_model, input_fn): + """Reads from checkpoint and evaluate 3D Unet model.""" + assert tf.distribute.has_strategy() + + unet_model.compile( + metrics=[unet_metrics.metric_accuracy], + experimental_steps_per_execution=params.steps_per_loop) + + # Override test_step() function so that inputs are spatially partitioned. + unet_model.test_step = functools.partial(test_step, unet_model.test_step, + params.input_partition_dims) + + # Load checkpoint for evaluation. 
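+  # tf.train.latest_checkpoint returns the newest checkpoint found under
+  # `eval_checkpoint_dir`; assert_existing_objects_matched verifies that every
+  # model variable was restored before evaluation runs.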
+ checkpoint = tf.train.Checkpoint(model=unet_model) + checkpoint_path = tf.train.latest_checkpoint(params.eval_checkpoint_dir) + status = checkpoint.restore(checkpoint_path) + status.assert_existing_objects_matched() + + eval_ds = strategy.experimental_distribute_datasets_from_function(input_fn) + eval_epoch_steps = params.eval_item_count // params.eval_batch_size + + eval_result = unet_model.evaluate( + x=eval_ds, steps=eval_epoch_steps, callbacks=unet3d_callbacks(params)) + return eval_result + + +def main(_): + params = extract_params(flags.FLAGS) + assert params.mode in {'train', 'eval'}, 'only support train and eval' + save_params(params) + + input_dtype = params.dtype + if input_dtype == 'float16' or input_dtype == 'bfloat16': + policy = tf.keras.mixed_precision.experimental.Policy( + 'mixed_bfloat16' if input_dtype == 'bfloat16' else 'mixed_float16') + tf.keras.mixed_precision.experimental.set_policy(policy) + + strategy = create_distribution_strategy(params) + with strategy.scope(): + unet_model = unet_model_lib.build_unet_model(params) + + if params.mode == 'train': + train(params, strategy, unet_model, + functools.partial(get_train_dataset, params), + functools.partial(get_eval_dataset, params)) + + elif params.mode == 'eval': + evaluate(params, strategy, unet_model, + functools.partial(get_eval_dataset, params)) + + else: + raise Exception('Only `train` mode and `eval` mode are supported.') + + +if __name__ == '__main__': + define_unet3d_flags() + app.run(main) diff --git a/official/vision/segmentation/unet_main_test.py b/official/vision/segmentation/unet_main_test.py new file mode 100644 index 000000000..765d6d3e0 --- /dev/null +++ b/official/vision/segmentation/unet_main_test.py @@ -0,0 +1,248 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os +import tempfile + +from absl import flags +from absl.testing import flagsaver +from absl.testing import parameterized +import numpy as np +import tensorflow as tf + +from tensorflow.contrib import cluster_resolver as contrib_cluster_resolver +from tensorflow.contrib.tpu.python.tpu import device_assignment as device_lib +from tensorflow.python.distribute import tpu_strategy as tpu_strategy_lib +from tensorflow.python.tpu import tpu_strategy_util +from official.modeling.hyperparams import params_dict +from official.vision.segmentation import unet_config +from official.vision.segmentation import unet_main as unet_main_lib +from official.vision.segmentation import unet_metrics +from official.vision.segmentation import unet_model as unet_model_lib + +FLAGS = flags.FLAGS + + +def create_fake_input_fn(params, + features_size, + labels_size, + use_bfloat16=False): + """Returns fake input function for testing.""" + + def fake_data_input_fn(unused_ctx=None): + """An input function for generating fake data.""" + batch_size = params.train_batch_size + features = np.random.rand(64, *features_size) + labels = np.random.randint(2, size=[64] + labels_size) + + # Convert the inputs to a Dataset. + dataset = tf.data.Dataset.from_tensor_slices((features, labels)) + + def _assign_dtype(features, labels): + if use_bfloat16: + features = tf.cast(features, tf.bfloat16) + labels = tf.cast(labels, tf.bfloat16) + else: + features = tf.cast(features, tf.float32) + labels = tf.cast(labels, tf.float32) + return features, labels + + # Shuffle, repeat, and batch the examples. + dataset = dataset.map(_assign_dtype) + dataset = dataset.shuffle(64).repeat() + dataset = dataset.batch(batch_size, drop_remainder=True) + dataset = dataset.prefetch(tf.data.experimental.AUTOTUNE) + + # Return the dataset. 
+ return dataset + + return fake_data_input_fn + + +class UnetMainTest(parameterized.TestCase, tf.test.TestCase): + + def setUp(self): + super(UnetMainTest, self).setUp() + self._model_dir = os.path.join(tempfile.mkdtemp(), 'model_dir') + tf.io.gfile.makedirs(self._model_dir) + + def tearDown(self): + tf.io.gfile.rmtree(self._model_dir) + super(UnetMainTest, self).tearDown() + + @flagsaver.flagsaver + def testUnet3DModel(self): + FLAGS.tpu = '' + FLAGS.mode = 'train' + params = params_dict.ParamsDict(unet_config.UNET_CONFIG, + unet_config.UNET_RESTRICTIONS) + params.override( + { + 'input_image_size': [64, 64, 64], + 'train_item_count': 4, + 'eval_item_count': 4, + 'train_batch_size': 2, + 'eval_batch_size': 2, + 'batch_size': 2, + 'num_base_filters': 16, + 'dtype': 'bfloat16', + 'depth': 1, + 'train_steps': 2, + 'eval_steps': 2, + 'mode': FLAGS.mode, + 'tpu': FLAGS.tpu, + 'num_gpus': 0, + 'checkpoint_interval': 1, + 'use_tpu': True, + 'input_partition_dims': None, + }, + is_strict=False) + params.validate() + params.lock() + + image_size = params.input_image_size + [params.num_channels] + label_size = params.input_image_size + [params.num_classes] + input_fn = create_fake_input_fn( + params, features_size=image_size, labels_size=label_size) + + resolver = contrib_cluster_resolver.TPUClusterResolver(tpu=params.tpu) + topology = tpu_strategy_util.initialize_tpu_system(resolver) + device_assignment = None + + if params.input_partition_dims is not None: + assert np.prod( + params.input_partition_dims) == 2, 'invalid unit test configuration' + computation_shape = [1, 1, 1, 2] + partition_dimension = params.input_partition_dims + num_replicas = resolver.get_tpu_system_metadata().num_cores // np.prod( + partition_dimension) + device_assignment = device_lib.device_assignment( + topology, + computation_shape=computation_shape, + num_replicas=num_replicas) + + strategy = tpu_strategy_lib.TPUStrategy( + resolver, device_assignment=device_assignment) + + with strategy.scope(): + model = unet_model_lib.build_unet_model(params) + optimizer = unet_model_lib.create_optimizer(params.init_learning_rate, + params) + loss_fn = unet_metrics.get_loss_fn(params.mode, params) + model.compile(loss=loss_fn, optimizer=optimizer, metrics=[loss_fn]) + + eval_ds = input_fn() + iterator = iter(eval_ds) + + image, _ = next(iterator) + logits = model(image, training=False) + self.assertEqual(logits.shape[1:], params.input_image_size + [3]) + + @parameterized.parameters( + { + 'use_mlir': True, + 'dtype': 'bfloat16', + 'input_partition_dims': None, + }, { + 'use_mlir': False, + 'dtype': 'bfloat16', + 'input_partition_dims': None, + }, { + 'use_mlir': True, + 'dtype': 'bfloat16', + 'input_partition_dims': None, + }, { + 'use_mlir': False, + 'dtype': 'bfloat16', + 'input_partition_dims': None, + }, { + 'use_mlir': True, + 'dtype': 'bfloat16', + 'input_partition_dims': [1, 2, 1, 1, 1], + }, { + 'use_mlir': False, + 'dtype': 'bfloat16', + 'input_partition_dims': [1, 2, 1, 1, 1], + }, { + 'use_mlir': True, + 'dtype': 'bfloat16', + 'input_partition_dims': [1, 2, 1, 1, 1], + }, { + 'use_mlir': False, + 'dtype': 'bfloat16', + 'input_partition_dims': [1, 2, 1, 1, 1] + }) + @flagsaver.flagsaver + def testUnetTrain(self, use_mlir, dtype, input_partition_dims): + FLAGS.tpu = '' + FLAGS.mode = 'train' + + if use_mlir: + tf.config.experimental.enable_mlir_bridge() + + params = params_dict.ParamsDict(unet_config.UNET_CONFIG, + unet_config.UNET_RESTRICTIONS) + params.override( + { + 'model_dir': self._model_dir, + 'input_image_size': [8, 
8, 8], + 'train_item_count': 2, + 'eval_item_count': 2, + 'train_batch_size': 2, + 'eval_batch_size': 2, + 'batch_size': 2, + 'num_base_filters': 1, + 'dtype': 'bfloat16', + 'depth': 1, + 'epochs': 1, + 'checkpoint_interval': 1, + 'train_steps': 1, + 'eval_steps': 1, + 'mode': FLAGS.mode, + 'tpu': FLAGS.tpu, + 'use_tpu': True, + 'num_gpus': 0, + 'distribution_strategy': 'tpu', + 'steps_per_loop': 1, + 'input_partition_dims': input_partition_dims, + }, + is_strict=False) + params.validate() + params.lock() + + image_size = params.input_image_size + [params.num_channels] + label_size = params.input_image_size + [params.num_classes] + input_fn = create_fake_input_fn( + params, features_size=image_size, labels_size=label_size) + + input_dtype = params.dtype + if input_dtype == 'float16' or input_dtype == 'bfloat16': + policy = tf.keras.mixed_precision.experimental.Policy( + 'mixed_bfloat16' if input_dtype == 'bfloat16' else 'mixed_float16') + tf.keras.mixed_precision.experimental.set_policy(policy) + + strategy = unet_main_lib.create_distribution_strategy(params) + with strategy.scope(): + unet_model = unet_model_lib.build_unet_model(params) + unet_main_lib.train(params, strategy, unet_model, input_fn, input_fn) + + +if __name__ == '__main__': + unet_main_lib.define_unet3d_flags() + tf.test.main() diff --git a/official/vision/segmentation/unet_metrics.py b/official/vision/segmentation/unet_metrics.py new file mode 100644 index 000000000..d7eb225aa --- /dev/null +++ b/official/vision/segmentation/unet_metrics.py @@ -0,0 +1,279 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Define metrics for the UNet 3D Model.""" + +from __future__ import absolute_import +from __future__ import division +# from __future__ import google_type_annotations +from __future__ import print_function + +import tensorflow as tf + + +def dice(y_true, y_pred, axis=(1, 2, 3, 4)): + """DICE coefficient. + + Taha AA, Hanbury A. Metrics for evaluating 3D medical image segmentation: + analysis, selection, and tool. BMC Med Imaging. 2015;15:29. Published + 2015 + Aug 12. doi:10.1186/s12880-015-0068-x + + Implemented according to + https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4533825/#Equ6 + + Args: + y_true: the ground truth matrix. Shape [batch_size, x, y, z, num_classes]. + y_pred: the prediction matrix. Shape [batch_size, x, y, z, num_classes]. + axis: axises of features. + + Returns: + DICE coefficient. + """ + y_true = tf.cast(y_true, y_pred.dtype) + eps = tf.keras.backend.epsilon() + + intersection = tf.reduce_sum(input_tensor=y_true * y_pred, axis=axis) + summation = tf.reduce_sum( + input_tensor=y_true, axis=axis) + tf.reduce_sum( + input_tensor=y_pred, axis=axis) + return (2 * intersection + eps) / (summation + eps) + + +def generalized_dice(y_true, y_pred, axis=(1, 2, 3)): + """Generalized Dice coefficient, for multi-class predictions. 
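+
+  Each class is weighted by the inverse square of its ground-truth volume,
+  w_c = 1 / (sum of y_true for class c)**2, so small structures contribute to
+  the score roughly as much as large ones.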
+ + For output of a multi-class model, where the shape of the output is + (batch, x, y, z, n_classes), the axis argument should be (1, 2, 3). + + Args: + y_true: the ground truth matrix. Shape [batch_size, x, y, z, num_classes]. + y_pred: the prediction matrix. Shape [batch_size, x, y, z, num_classes]. + axis: axises of features. + + Returns: + DICE coefficient. + """ + y_true = tf.cast(y_true, y_pred.dtype) + + if y_true.get_shape().ndims < 2 or y_pred.get_shape().ndims < 2: + raise ValueError('y_true and y_pred must be at least rank 2.') + + epsilon = tf.keras.backend.epsilon() + w = tf.math.reciprocal(tf.square(tf.reduce_sum(y_true, axis=axis)) + epsilon) + num = 2 * tf.reduce_sum( + w * tf.reduce_sum(y_true * y_pred, axis=axis), axis=-1) + den = tf.reduce_sum(w * tf.reduce_sum(y_true + y_pred, axis=axis), axis=-1) + return (num + epsilon) / (den + epsilon) + + +def hamming(y_true, y_pred, axis=(1, 2, 3)): + """Hamming distance. + + Args: + y_true: the ground truth matrix. Shape [batch_size, x, y, z]. + y_pred: the prediction matrix. Shape [batch_size, x, y, z]. + axis: a list, axises of the feature dimensions. + + Returns: + Hamming distance value. + """ + y_true = tf.cast(y_true, y_pred.dtype) + return tf.reduce_mean(input_tensor=tf.not_equal(y_pred, y_true), axis=axis) + + +def jaccard(y_true, y_pred, axis=(1, 2, 3, 4)): + """Jaccard Similarity. + + Taha AA, Hanbury A. Metrics for evaluating 3D medical image segmentation: + analysis, selection, and tool. BMC Med Imaging. 2015;15:29. Published + 2015 + Aug 12. doi:10.1186/s12880-015-0068-x + + Implemented according to + https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4533825/#Equ7 + + Args: + y_true: the ground truth matrix. Shape [batch_size, x, y, z, num_classes]. + y_pred: the prediction matrix. Shape [batch_size, x, y, z, num_classes]. + axis: axises of features. + + Returns: + Jaccard similarity. + """ + y_true = tf.cast(y_true, y_pred.dtype) + eps = tf.keras.backend.epsilon() + + intersection = tf.reduce_sum(input_tensor=y_true * y_pred, axis=axis) + union = tf.reduce_sum(y_true, axis=axis) + tf.reduce_sum(y_pred, axis=axis) + return (intersection + eps) / (union - intersection + eps) + + +def tversky(y_true, y_pred, axis=(1, 2, 3), alpha=0.3, beta=0.7): + """Tversky similarity. + + Args: + y_true: the ground truth matrix. Shape [batch_size, x, y, z, num_classes]. + y_pred: the prediction matrix. Shape [batch_size, x, y, z, num_classes]. + axis: axises of spatial dimensions. + alpha: weight of the prediction. + beta: weight of the groundtruth. + + Returns: + Tversky similarity coefficient. + """ + y_true = tf.cast(y_true, y_pred.dtype) + + if y_true.get_shape().ndims < 2 or y_pred.get_shape().ndims < 2: + raise ValueError('y_true and y_pred must be at least rank 2.') + + eps = tf.keras.backend.epsilon() + + num = tf.reduce_sum(input_tensor=y_pred * y_true, axis=axis) + den = ( + num + alpha * tf.reduce_sum(y_pred * (1 - y_true), axis=axis) + + beta * tf.reduce_sum((1 - y_pred) * y_true, axis=axis)) + # Sum over classes. + return tf.reduce_sum(input_tensor=(num + eps) / (den + eps), axis=-1) + + +def adaptive_dice32(y_true, y_pred, data_format='channels_last'): + """Adaptive dice metric. + + Args: + y_true: the ground truth matrix. Shape [batch_size, x, y, z, num_classes]. + y_pred: the prediction matrix. Shape [batch_size, x, y, z, num_classes]. + data_format: channel last of channel first. + + Returns: + Adaptive dice value. 
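+    Per-class Dice scores are computed from the intersection and union sums,
+    weighted by exp(-dice_score) and normalized by num_classes * exp(-1); the
+    function returns 1 minus that weighted score, so it decreases as
+    segmentation quality improves and can be used directly as a training loss.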
+ """ + epsilon = 10**-7 + y_true = tf.cast(y_true, dtype=y_pred.dtype) + # Determine axes to pass to tf.reduce_sum + if data_format == 'channels_last': + ndim = len(y_pred.shape) + reduction_axes = list(range(ndim - 1)) + else: + reduction_axes = 1 + + # Calculate intersections and unions per class + intersections = tf.reduce_sum(y_true * y_pred, axis=reduction_axes) + unions = tf.reduce_sum(y_true + y_pred, axis=reduction_axes) + + # Calculate Dice scores per class + dice_scores = 2.0 * (intersections + epsilon) / (unions + epsilon) + + # Calculate weights based on Dice scores + weights = tf.exp(-1.0 * dice_scores) + + # Multiply weights by corresponding scores and get sum + weighted_dice = tf.reduce_sum(weights * dice_scores) + + # Calculate normalization factor + norm_factor = tf.size(input=dice_scores, out_type=tf.float32) * tf.exp(-1.0) + + weighted_dice = tf.cast(weighted_dice, dtype=tf.float32) + + # Return 1 - adaptive Dice score + return 1 - (weighted_dice / norm_factor) + + +def assert_shape_equal(pred_shape, label_shape): + """Asserts that `pred_shape` and `label_shape` is equal.""" + assert (label_shape == pred_shape + ), 'pred. shape {} is not equal to label shape {}'.format( + label_shape, pred_shape) + + +def get_loss_fn(mode, params): + """Return loss_fn for unet training. + + Args: + mode: training or eval. This is a legacy parameter from TF1. + params: unet configuration parameter. + + Returns: + loss_fn. + """ + + def loss_fn(y_true, y_pred): + """Returns scalar loss from labels and netowrk outputs.""" + loss = None + label_shape = y_true.get_shape().as_list() + pred_shape = y_pred.get_shape().as_list() + assert_shape_equal(label_shape, pred_shape) + if params.loss == 'adaptive_dice32': + loss = adaptive_dice32(y_true, y_pred) + elif params.loss == 'cross_entropy': + if mode == tf.estimator.ModeKeys.TRAIN and params.use_index_label_in_train: + labels_idx = tf.cast(y_true, dtype=tf.int32) + else: + # Use one-hot label representation, convert to label index. + labels_idx = tf.argmax(input=y_true, axis=-1, output_type=tf.int32) + y_pred = tf.cast(y_pred, dtype=tf.float32) + loss = tf.keras.losses.sparse_categorical_crossentropy( + labels_idx, y_pred, from_logits=False) + else: + raise Exception('Unexpected loss type') + return loss + + return loss_fn + + +def metric_accuracy(labels, predictions): + """Returns accuracy metric of model outputs. + + Args: + labels: ground truth tensor (labels). + predictions: network output (logits) + + Returns: + metric_fn. + """ + if labels.dtype == tf.bfloat16: + labels = tf.cast(labels, tf.float32) + if predictions.dtype == tf.bfloat16: + predictions = tf.cast(predictions, tf.float32) + return tf.keras.backend.mean( + tf.keras.backend.equal( + tf.argmax(input=labels, axis=-1), + tf.argmax(input=predictions, axis=-1))) + + +def metric_ce(labels, predictions): + """Returns categorical crossentropy given outputs and labels. + + Args: + labels: ground truth tensor (labels). + predictions: network output (logits) + + Returns: + metric_fn. 
+ """ + if labels.dtype == tf.bfloat16: + labels = tf.cast(labels, tf.float32) + if predictions.dtype == tf.bfloat16: + predictions = tf.cast(predictions, tf.float32) + return tf.keras.losses.categorical_crossentropy( + labels, predictions, from_logits=False) + + +def metric_dice(labels, predictions): + """Returns adaptive dice coefficient.""" + if labels.dtype == tf.bfloat16: + labels = tf.cast(labels, tf.float32) + if predictions.dtype == tf.bfloat16: + predictions = tf.cast(predictions, tf.float32) + return adaptive_dice32(labels, predictions) diff --git a/official/vision/segmentation/unet_model.py b/official/vision/segmentation/unet_model.py new file mode 100644 index 000000000..9d13fc9eb --- /dev/null +++ b/official/vision/segmentation/unet_model.py @@ -0,0 +1,238 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Model definition for the TF2 Keras UNet 3D Model.""" + +from __future__ import absolute_import +from __future__ import division +# from __future__ import google_type_annotations +from __future__ import print_function + +import tensorflow as tf + + +def create_optimizer(init_learning_rate, params): + """Creates optimizer for training.""" + learning_rate = tf.keras.optimizers.schedules.ExponentialDecay( + initial_learning_rate=init_learning_rate, + decay_steps=params.lr_decay_steps, + decay_rate=params.lr_decay_rate) + + # TODO(hongjunchoi): Provide alternative optimizer options depending on model + # config parameters. + optimizer = tf.keras.optimizers.Adam(learning_rate) + return optimizer + + +def create_convolution_block(input_layer, + n_filters, + batch_normalization=False, + kernel=(3, 3, 3), + activation=tf.nn.relu, + padding='SAME', + strides=(1, 1, 1), + data_format='channels_last', + instance_normalization=False): + """UNet convolution block. + + Args: + input_layer: tf.Tensor, the input tensor. + n_filters: integer, the number of the output channels of the convolution. + batch_normalization: boolean, use batch normalization after the convolution. + kernel: kernel size of the convolution. + activation: Tensorflow activation layer to use. (default is 'relu') + padding: padding type of the convolution. + strides: strides of the convolution. + data_format: data format of the convolution. One of 'channels_first' or + 'channels_last'. + instance_normalization: use Instance normalization. Exclusive with batch + normalization. + + Returns: + The Tensor after apply the convolution block to the input. + """ + assert instance_normalization == 0, 'TF 2.0 does not support inst. norm.' 
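+  # The block is Conv3D -> optional BatchNormalization -> activation; the
+  # convolution is created with activation=None so that normalization (when
+  # enabled) is applied before the nonlinearity.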
+ layer = tf.keras.layers.Conv3D( + filters=n_filters, + kernel_size=kernel, + strides=strides, + padding=padding, + data_format=data_format, + activation=None, + )( + inputs=input_layer) + if batch_normalization: + layer = tf.keras.layers.BatchNormalization(axis=1)(inputs=layer) + return activation(layer) + + +def apply_up_convolution(inputs, + num_filters, + pool_size, + kernel_size=(2, 2, 2), + strides=(2, 2, 2), + deconvolution=False): + """Apply up convolution on inputs. + + Args: + inputs: input feature tensor. + num_filters: number of deconvolution output feature channels. + pool_size: pool size of the up-scaling. + kernel_size: kernel size of the deconvolution. + strides: strides of the deconvolution. + deconvolution: Use deconvolution or upsampling. + + Returns: + The tensor of the up-scaled features. + """ + if deconvolution: + return tf.keras.layers.Conv3DTranspose( + filters=num_filters, kernel_size=kernel_size, strides=strides)( + inputs=inputs) + else: + return tf.keras.layers.UpSampling3D(size=pool_size)(inputs) + + +def unet3d_base(input_layer, + pool_size=(2, 2, 2), + n_labels=1, + deconvolution=False, + depth=4, + n_base_filters=32, + batch_normalization=False, + data_format='channels_last'): + """Builds the 3D UNet Tensorflow model and return the last layer logits. + + Args: + input_layer: the input Tensor. + pool_size: Pool size for the max pooling operations. + n_labels: Number of binary labels that the model is learning. + deconvolution: If set to True, will use transpose convolution(deconvolution) + instead of up-sampling. This increases the amount memory required during + training. + depth: indicates the depth of the U-shape for the model. The greater the + depth, the more max pooling layers will be added to the model. Lowering + the depth may reduce the amount of memory required for training. + n_base_filters: The number of filters that the first layer in the + convolution network will have. Following layers will contain a multiple of + this number. Lowering this number will likely reduce the amount of memory + required to train the model. + batch_normalization: boolean. True for use batch normalization after + convolution and before activation. + data_format: string, channel_last (default) or channel_first + + Returns: + The last layer logits of 3D UNet. 
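+
+  Each encoder level doubles the number of filters and halves the spatial
+  resolution via max pooling; the decoder mirrors this with up-convolutions
+  (or up-sampling) and concatenates the matching encoder features as skip
+  connections before two further convolution blocks.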
+ """ + levels = [] + current_layer = input_layer + if data_format == 'channels_last': + channel_dim = -1 + else: + channel_dim = 1 + + # add levels with max pooling + for layer_depth in range(depth): + layer1 = create_convolution_block( + input_layer=current_layer, + n_filters=n_base_filters * (2**layer_depth), + batch_normalization=batch_normalization, + kernel=(3, 3, 3), + activation=tf.nn.relu, + padding='SAME', + strides=(1, 1, 1), + data_format=data_format, + instance_normalization=False) + layer2 = create_convolution_block( + input_layer=layer1, + n_filters=n_base_filters * (2**layer_depth) * 2, + batch_normalization=batch_normalization, + kernel=(3, 3, 3), + activation=tf.nn.relu, + padding='SAME', + strides=(1, 1, 1), + data_format=data_format, + instance_normalization=False) + if layer_depth < depth - 1: + current_layer = tf.keras.layers.MaxPool3D( + pool_size=pool_size, + strides=(2, 2, 2), + padding='VALID', + data_format=data_format)( + inputs=layer2) + levels.append([layer1, layer2, current_layer]) + else: + current_layer = layer2 + levels.append([layer1, layer2]) + + # add levels with up-convolution or up-sampling + for layer_depth in range(depth - 2, -1, -1): + up_convolution = apply_up_convolution( + current_layer, + pool_size=pool_size, + deconvolution=deconvolution, + num_filters=current_layer.get_shape().as_list()[channel_dim]) + concat = tf.concat([up_convolution, levels[layer_depth][1]], + axis=channel_dim) + current_layer = create_convolution_block( + n_filters=levels[layer_depth][1].get_shape().as_list()[channel_dim], + input_layer=concat, + batch_normalization=batch_normalization, + kernel=(3, 3, 3), + activation=tf.nn.relu, + padding='SAME', + strides=(1, 1, 1), + data_format=data_format, + instance_normalization=False) + current_layer = create_convolution_block( + n_filters=levels[layer_depth][1].get_shape().as_list()[channel_dim], + input_layer=current_layer, + batch_normalization=batch_normalization, + kernel=(3, 3, 3), + activation=tf.nn.relu, + padding='SAME', + strides=(1, 1, 1), + data_format=data_format, + instance_normalization=False) + + final_convolution = tf.keras.layers.Conv3D( + filters=n_labels, + kernel_size=(1, 1, 1), + padding='VALID', + data_format=data_format, + activation=None)( + current_layer) + return final_convolution + + +def build_unet_model(params): + """Builds the unet model, optimizer included.""" + input_shape = params.input_image_size + [1] + input_layer = tf.keras.layers.Input(shape=input_shape) + + logits = unet3d_base( + input_layer, + pool_size=(2, 2, 2), + n_labels=params.num_classes, + deconvolution=params.deconvolution, + depth=params.depth, + n_base_filters=params.num_base_filters, + batch_normalization=params.use_batch_norm, + data_format=params.data_format) + + # Set output of softmax to float32 to avoid potential numerical overflow. + predictions = tf.keras.layers.Softmax(dtype='float32')(logits) + model = tf.keras.models.Model(inputs=input_layer, outputs=predictions) + model.optimizer = create_optimizer(params.init_learning_rate, params) + return model -- GitLab From d495e481d8985ab0eedd17ebf6ced417fa6f03f2 Mon Sep 17 00:00:00 2001 From: Chen Chen Date: Wed, 24 Jun 2020 15:08:00 -0700 Subject: [PATCH 58/79] Exclude the label_id used for padding in the label space for tagging task. 
PiperOrigin-RevId: 318149287 --- official/nlp/tasks/tagging.py | 64 ++++++++++++++++++++++------------- 1 file changed, 41 insertions(+), 23 deletions(-) diff --git a/official/nlp/tasks/tagging.py b/official/nlp/tasks/tagging.py index d319301dc..f8e10e9f7 100644 --- a/official/nlp/tasks/tagging.py +++ b/official/nlp/tasks/tagging.py @@ -35,16 +35,37 @@ class TaggingConfig(cfg.TaskConfig): hub_module_url: str = '' model: encoders.TransformerEncoderConfig = ( encoders.TransformerEncoderConfig()) + + # The number of real labels. Note that a word may be tokenized into + # multiple word_pieces tokens, and we asssume the real label id (non-negative) + # is assigned to the first token of the word, and a negative label id is + # assigned to the remaining tokens. The negative label id will not contribute + # to loss and metrics. num_classes: int = 0 - # The ignored label id will not contribute to loss. - # A word may be tokenized into multiple word_pieces tokens, and we usually - # assign the real label id for the first token of the word, and - # `ignore_label_id` for the remaining tokens. - ignore_label_id: int = 0 train_data: cfg.DataConfig = cfg.DataConfig() validation_data: cfg.DataConfig = cfg.DataConfig() +def _masked_labels_and_weights(y_true): + """Masks negative values from token level labels. + + Args: + y_true: Token labels, typically shape (batch_size, seq_len), where tokens + with negative labels should be ignored during loss/accuracy calculation. + + Returns: + (masked_y_true, masked_weights) where `masked_y_true` is the input + with each negative label replaced with zero and `masked_weights` is 0.0 + where negative labels were replaced and 1.0 for original labels. + """ + # Ignore the classes of tokens with negative values. + mask = tf.greater_equal(y_true, 0) + # Replace negative labels, which are out of bounds for some loss functions, + # with zero. + masked_y_true = tf.where(mask, y_true, 0) + return masked_y_true, tf.cast(mask, tf.float32) + + @base_task.register_task_cls(TaggingConfig) class TaggingTask(base_task.Task): """Task object for tagging (e.g., NER or POS).""" @@ -79,14 +100,11 @@ class TaggingTask(base_task.Task): def build_losses(self, labels, model_outputs, aux_losses=None) -> tf.Tensor: model_outputs = tf.cast(model_outputs, tf.float32) + masked_labels, masked_weights = _masked_labels_and_weights(labels) loss = tf.keras.losses.sparse_categorical_crossentropy( - labels, model_outputs, from_logits=True) - # `ignore_label_id` will not contribute to loss. - label_weights = tf.cast( - tf.not_equal(labels, self.task_config.ignore_label_id), - dtype=tf.float32) - numerator_loss = tf.reduce_sum(loss * label_weights) - denominator_loss = tf.reduce_sum(label_weights) + masked_labels, model_outputs, from_logits=True) + numerator_loss = tf.reduce_sum(loss * masked_weights) + denominator_loss = tf.reduce_sum(masked_weights) loss = tf.math.divide_no_nan(numerator_loss, denominator_loss) return loss @@ -100,7 +118,13 @@ class TaggingTask(base_task.Task): input_word_ids=dummy_ids, input_mask=dummy_ids, input_type_ids=dummy_ids) - y = tf.ones((1, params.seq_length), dtype=tf.int32) + + # Include some label_id as -1, which will be ignored in loss/metrics. 
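+      # Labels are drawn from [-1, num_classes); -1 mimics the padding ids
+      # that _masked_labels_and_weights() masks out of the loss and metrics.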
+ y = tf.random.uniform( + shape=(1, params.seq_length), + minval=-1, + maxval=self.task_config.num_classes, + dtype=tf.dtypes.int32) return (x, y) dataset = tf.data.Dataset.range(1) @@ -118,19 +142,13 @@ class TaggingTask(base_task.Task): return [tf.keras.metrics.SparseCategoricalAccuracy(name='accuracy')] def process_metrics(self, metrics, labels, model_outputs): - # `ignore_label_id` will not contribute to metrics. - sample_weight = tf.cast( - tf.not_equal(labels, self.task_config.ignore_label_id), - dtype=tf.float32) + masked_labels, masked_weights = _masked_labels_and_weights(labels) for metric in metrics: - metric.update_state(labels, model_outputs, sample_weight) + metric.update_state(masked_labels, model_outputs, masked_weights) def process_compiled_metrics(self, compiled_metrics, labels, model_outputs): - # `ignore_label_id` will not contribute to metrics. - sample_weight = tf.cast( - tf.not_equal(labels, self.task_config.ignore_label_id), - dtype=tf.float32) - compiled_metrics.update_state(labels, model_outputs, sample_weight) + masked_labels, masked_weights = _masked_labels_and_weights(labels) + compiled_metrics.update_state(masked_labels, model_outputs, masked_weights) def initialize(self, model): """Load a pretrained checkpoint (if exists) and then train from iter 0.""" -- GitLab From 14782f9a5f95432e931101d145922c004f28c44a Mon Sep 17 00:00:00 2001 From: Hongkun Yu Date: Wed, 24 Jun 2020 15:22:13 -0700 Subject: [PATCH 59/79] Opensource 3D Unet model to Tensorflow Official Model Garden. PiperOrigin-RevId: 318151912 --- official/vision/segmentation/README.md | 188 ---------- official/vision/segmentation/__init__.py | 0 official/vision/segmentation/convert_lits.py | 269 -------------- .../segmentation/convert_lits_nii_to_npy.py | 58 --- official/vision/segmentation/unet_config.py | 76 ---- official/vision/segmentation/unet_data.py | 175 --------- official/vision/segmentation/unet_main.py | 350 ------------------ .../vision/segmentation/unet_main_test.py | 248 ------------- official/vision/segmentation/unet_metrics.py | 279 -------------- official/vision/segmentation/unet_model.py | 238 ------------ 10 files changed, 1881 deletions(-) delete mode 100644 official/vision/segmentation/README.md delete mode 100644 official/vision/segmentation/__init__.py delete mode 100644 official/vision/segmentation/convert_lits.py delete mode 100644 official/vision/segmentation/convert_lits_nii_to_npy.py delete mode 100644 official/vision/segmentation/unet_config.py delete mode 100644 official/vision/segmentation/unet_data.py delete mode 100644 official/vision/segmentation/unet_main.py delete mode 100644 official/vision/segmentation/unet_main_test.py delete mode 100644 official/vision/segmentation/unet_metrics.py delete mode 100644 official/vision/segmentation/unet_model.py diff --git a/official/vision/segmentation/README.md b/official/vision/segmentation/README.md deleted file mode 100644 index 3ab3fb18b..000000000 --- a/official/vision/segmentation/README.md +++ /dev/null @@ -1,188 +0,0 @@ -# UNet 3D Model - -This repository contains TensorFlow 2.x implementation for 3D Unet model -[[1]](#1) as well as instructions for producing the data for training and -evaluation. - -Furthermore, this implementation also includes use of spatial partitioning -[[2]](#2) for TPU's to leverage high resolution images for training. 
- -## Contents - * [Contents](#contents) - * [Prerequsites](#prerequsites) - * [Setup](#setup) - * [Data Preparation](#data-preparation) - * [Training](#data-preparation) - * [Train with Spatial Partition](#train-with-spatial-partition) - * [Evaluation](#evaluation) - * [References](#references) - -## Prerequsites - -To use high resolution image data, spatial partition should be used to avoid -prevent out of memory issues. This is currently only supported with TPU's. To -use TPU's for training, in Google Cloud console, please run the following -command to create cloud TPU VM. - -```shell -ctpu up -name=[tpu_name] -tf-version=nightly -tpu-size=v3-8 -zone=us-central1-b -``` - -## Setup - -Before running any binary, please install necessary packages on cloud VM. - -```shell -pip install -r requirements.tx -``` - -## Data Preparation - -This software uses TFRecords as input. We provide example scripts to convert -Numpy (.npy) files or NIfTI-1 (.nii) files to TFRecords, using the Liver Tumor -Segmentation (LiTS) dataset (Christ et al. -https://competitions.codalab.org/competitions/17094). You can download the -dataset by registering on the competition website. - -**Example**: - -```shell -cd data_preprocess - -# Change input_path and output_path in convert_lits_nii_to_npy.py -# Then run the script to convert nii to npy. -python convert_lits_nii_to_npy.py - -# Convert npy files to TFRecords. -python convert_lits.py \ - --image_file_pattern=Downloads/.../volume-{}.npy \ - --label_file_pattern=Downloads/.../segmentation-{}.npy \ - --output_path=Downloads/... -``` - -## Training - -Working configs on TPU V3-8: - -+ TF 2.2, train_batch_size=16, use_batch_norm=true, dtype='bfloat16' or - 'float16', spatial partition not used. -+ tf-nightly, train_batch_size=32, use_batch_norm=true, dtype='bfloat16', - spatial partition used. - -The following example shows how to train volumic UNet on TPU v3-8. The loss is -*adaptive_dice32*. The training batch size is 32. For detail config, refer to -`unet_config.py` and example config file shown below. - -**Example**: - -```shell -DATA_BUCKET= -TRAIN_FILES="${DATA_BUCKET}/tfrecords/trainbox*.tfrecord" -VAL_FILES="${DATA_BUCKET}/tfrecords/validationbox*.tfrecord" -MODEL_BUCKET= -EXP_NAME=unet_20190610_dice_t1 - -python unet_main.py \ ---distribution_strategy=<"mirrored" or "tpu"> ---num_gpus=<'number of GPUs to use if using mirrored strategy'> ---tpu= \ ---model_dir="gs://${MODEL_BUCKET}/models/${EXP_NAME}" \ ---training_file_pattern="${TRAIN_FILES}" \ ---eval_file_pattern="${VAL_FILES}" \ ---steps_per_loop=10 \ ---mode=train \ ---config_file="./configs/cloud/v3-8_128x128x128_ce.yaml" \ -``` - -The following script example is for running evaluation on TPU v3-8. -Configurations such as `train_batch_size`, `train_steps`, `eval_batch_size` and -`eval_item_count` are defined in the configuration file passed as -`config_file`flag. It is only one line change from previous script: changes the -`mode` flag to "eval". - -### Train with Spatial Partition - -The following example specifies spatial partition with the -"--input_partition_dims" in the config file. For example, setting -`input_partition_dims: [1, 16, 1, 1, 1]` in the config_file will split -the image into 16 ways in first (width) dimension. The first dimension -(set to 1) is the batch dimension. 
- -**Example: Train with 16-way spatial partition**: - -```shell -DATA_BUCKET= -TRAIN_FILES="${DATA_BUCKET}/tfrecords/trainbox*.tfrecord" -VAL_FILES="${DATA_BUCKET}/tfrecords/validationbox*.tfrecord" -MODEL_BUCKET= -EXP_NAME=unet_20190610_dice_t1 - -python unet_main.py \ ---distribution_strategy=<"mirrored" or "tpu"> ---num_gpus=<'number of GPUs to use if using mirrored strategy'> ---tpu= \ ---model_dir="gs://${MODEL_BUCKET}/models/${EXP_NAME}" \ ---training_file_pattern="${TRAIN_FILES}" \ ---eval_file_pattern="${VAL_FILES}" \ ---steps_per_loop=10 \ ---mode=train \ ---config_file="./configs/cloud/v3-8_128x128x128_ce.yaml" -``` - -**Example: Example config file with 16-way spatial partition**: - -``` -train_steps: 3000 -loss: 'adaptive_dice32' -train_batch_size: 8 -eval_batch_size: 8 -use_index_label_in_train: false - -input_partition_dims: [1,16,1,1,1] -input_image_size: [256,256,256] - -dtype: 'bfloat16' -label_dtype: 'float32' - -train_item_count: 5400 -eval_item_count: 1674 -``` - -## Evaluation - -```shell -DATA_BUCKET= -TRAIN_FILES="${DATA_BUCKET}/tfrecords/trainbox*.tfrecord" -VAL_FILES="${DATA_BUCKET}/tfrecords/validationbox*.tfrecord" -MODEL_BUCKET= -EXP_NAME=unet_20190610_dice_t1 - -python unet_main.py \ ---distribution_strategy=<"mirrored" or "tpu"> ---num_gpus=<'number of GPUs to use if using mirrored strategy'> ---tpu= \ ---model_dir="gs://${MODEL_BUCKET}/models/${EXP_NAME}" \ ---training_file_pattern="${TRAIN_FILES}" \ ---eval_file_pattern="${VAL_FILES}" \ ---steps_per_loop=10 \ ---mode="eval" \ ---config_file="./configs/cloud/v3-8_128x128x128_ce.yaml" -``` - -## License - -[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) - -This project is licensed under the terms of the **Apache License 2.0**. - -## References - -[1] Özgün Çiçek, Ahmed Abdulkadir, Soeren S. Lienkamp, -Thomas Brox, Olaf Ronneberger "3D U-Net: Learning Dense Volumetric Segmentation -from Sparse Annotation": https://arxiv.org/abs/1606.06650. (MICCAI 2016). - -[2] Le Hou, Youlong Cheng, Noam Shazeer, Niki Parmar, Yeqing Li, -Panagiotis Korfiatis, Travis M. Drucker, Daniel J. Blezek, Xiaodan Song "High -Resolution Medical Image Analysis with Spatial Partitioning": -https://arxiv.org/abs/1810.04805. - diff --git a/official/vision/segmentation/__init__.py b/official/vision/segmentation/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/official/vision/segmentation/convert_lits.py b/official/vision/segmentation/convert_lits.py deleted file mode 100644 index e80e60e1d..000000000 --- a/official/vision/segmentation/convert_lits.py +++ /dev/null @@ -1,269 +0,0 @@ -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -r"""Converts raw LiTS numpy data to TFRecord. 
- -The file is forked from: -https://github.com/tensorflow/tpu/blob/master/models/official/unet3d/data_preprocess/convert_lits.py -""" - -from __future__ import absolute_import -from __future__ import division -# from __future__ import google_type_annotations -from __future__ import print_function - -import os - -from absl import app -from absl import flags -from absl import logging -import numpy as np -from PIL import Image -from scipy import ndimage -import tensorflow.google.compat.v1 as tf - -flags.DEFINE_string("image_file_pattern", None, - "path pattern to an input image npy file.") -flags.DEFINE_string("label_file_pattern", None, - "path pattern to an input label npy file.") -flags.DEFINE_string("output_path", None, "path to output TFRecords.") - -flags.DEFINE_boolean("crop_liver_region", True, - "whether to crop liver region out.") -flags.DEFINE_boolean("apply_data_aug", False, - "whether to apply data augmentation.") - -flags.DEFINE_integer("shard_start", 0, - "start with volume-${shard_start}.npy.") -flags.DEFINE_integer("shard_stride", 1, - "this process will convert " - "volume-${shard_start + n * shard_stride}.npy for all n.") - -flags.DEFINE_integer("output_size", 128, - "output, cropped size along x, y, and z.") -flags.DEFINE_integer("resize_size", 192, - "size along x, y, and z before cropping.") - -FLAGS = flags.FLAGS - - -def to_1hot(label): - per_class = [] - for classes in range(3): - per_class.append((label == classes)[..., np.newaxis]) - label = np.concatenate(per_class, axis=-1).astype(label.dtype) - return label - - -def save_to_tfrecord(image, label, idx, im_id, output_path, - convert_label_to_1hot): - """Save to TFRecord.""" - if convert_label_to_1hot: - label = to_1hot(label) - - d_feature = {} - d_feature["image/ct_image"] = tf.train.Feature( - bytes_list=tf.train.BytesList(value=[image.reshape([-1]).tobytes()])) - d_feature["image/label"] = tf.train.Feature( - bytes_list=tf.train.BytesList(value=[label.reshape([-1]).tobytes()])) - - example = tf.train.Example(features=tf.train.Features(feature=d_feature)) - serialized = example.SerializeToString() - - result_file = os.path.join( - output_path, "instance-{}-{}.tfrecords".format(im_id, idx)) - options = tf.python_io.TFRecordOptions( - tf.python_io.TFRecordCompressionType.GZIP) - with tf.python_io.TFRecordWriter(result_file, options=options) as w: - w.write(serialized) - - -def intensity_change(im): - """Color augmentation.""" - if np.random.rand() < 0.1: - return im - # Randomly scale color. - sigma = 0.05 - truncate_rad = 0.1 - im *= np.clip(np.random.normal(1.0, sigma), - 1.0 - truncate_rad, 1.0 + truncate_rad) - return im - - -def rand_crop_liver(image, label, res_s, out_s, - apply_data_aug, augment_times=54): - """Crop image and label; Randomly change image intensity. - - Randomly crop image and label around liver. - - Args: - image: 3D numpy array. - label: 3D numpy array. - res_s: resized size of image and label. - out_s: output size of random crops. - apply_data_aug: whether to apply data augmentation. - augment_times: the number of times to randomly crop and augment data. - Yields: - croped and augmented image and label. - """ - if image.shape != (res_s, res_s, res_s) or \ - label.shape != (res_s, res_s, res_s): - logging.info("Unexpected shapes. 
" - "image.shape: %s, label.shape: %s", - image.shape, label.shape) - return - - rough_liver_label = 1 - x, y, z = np.where(label == rough_liver_label) - bbox_center = [(x.min() + x.max()) // 2, - (y.min() + y.max()) // 2, - (z.min() + z.max()) // 2] - - def in_range_check(c): - c = max(c, out_s // 2) - c = min(c, res_s - out_s // 2) - return c - - for _ in range(augment_times): - rand_c = [] - for c in bbox_center: - sigma = out_s // 6 - truncate_rad = out_s // 4 - c += np.clip(np.random.randn() * sigma, -truncate_rad, truncate_rad) - rand_c.append(int(in_range_check(c))) - - image_aug = image[rand_c[0] - out_s // 2:rand_c[0] + out_s // 2, - rand_c[1] - out_s // 2:rand_c[1] + out_s // 2, - rand_c[2] - out_s // 2:rand_c[2] + out_s // 2].copy() - label_aug = label[rand_c[0] - out_s // 2:rand_c[0] + out_s // 2, - rand_c[1] - out_s // 2:rand_c[1] + out_s // 2, - rand_c[2] - out_s // 2:rand_c[2] + out_s // 2].copy() - - if apply_data_aug: - image_aug = intensity_change(image_aug) - - yield image_aug, label_aug - - -def rand_crop_whole_ct(image, label, res_s, out_s, - apply_data_aug, augment_times=2): - """Crop image and label; Randomly change image intensity. - - Randomly crop image and label. - - Args: - image: 3D numpy array. - label: 3D numpy array. - res_s: resized size of image and label. - out_s: output size of random crops. - apply_data_aug: whether to apply data augmentation. - augment_times: the number of times to randomly crop and augment data. - Yields: - croped and augmented image and label. - """ - if image.shape != (res_s, res_s, res_s) or \ - label.shape != (res_s, res_s, res_s): - logging.info("Unexpected shapes. " - "image.shape: %s, label.shape: %s", - image.shape, label.shape) - return - - if not apply_data_aug: - # Do not augment data. - idx = (res_s - out_s) // 2 - image = image[idx:idx + out_s, idx:idx + out_s, idx:idx + out_s] - label = label[idx:idx + out_s, idx:idx + out_s, idx:idx + out_s] - yield image, label - else: - cut = res_s - out_s - for _ in range(augment_times): - for i in [0, cut // 2, cut]: - for j in [0, cut // 2, cut]: - for k in [0, cut // 2, cut]: - image_aug = image[i:i + out_s, j:j + out_s, k:k + out_s].copy() - label_aug = label[i:i + out_s, j:j + out_s, k:k + out_s].copy() - image_aug = intensity_change(image_aug) - yield image_aug, label_aug - - -def resize_3d_image_nearest_interpolation(im, res_s): - """Resize 3D image, but with nearest interpolation.""" - new_shape = [res_s, im.shape[1], im.shape[2]] - ret0 = np.zeros(new_shape, dtype=im.dtype) - for i in range(im.shape[2]): - im_slice = np.array(Image.fromarray(im[..., i]).resize( - (im.shape[1], res_s), resample=Image.NEAREST)) - ret0[..., i] = im_slice - - new_shape = [res_s, res_s, res_s] - ret = np.zeros(new_shape, dtype=im.dtype) - for i in range(res_s): - im_slice = np.array(Image.fromarray(ret0[i, ...]).resize( - (res_s, res_s), resample=Image.NEAREST)) - ret[i, ...] 
= im_slice - return ret - - -def process_one_file(image_path, label_path, im_id, - output_path, res_s, out_s, - crop_liver_region, apply_data_aug): - """Convert one npy file.""" - with tf.gfile.Open(image_path, "rb") as f: - image = np.load(f) - with tf.gfile.Open(label_path, "rb") as f: - label = np.load(f) - - image = ndimage.zoom(image, [float(res_s) / image.shape[0], - float(res_s) / image.shape[1], - float(res_s) / image.shape[2]]) - label = resize_3d_image_nearest_interpolation(label.astype(np.uint8), - res_s).astype(np.float32) - - if crop_liver_region: - for idx, (image_aug, label_aug) in enumerate(rand_crop_liver( - image, label, res_s, out_s, apply_data_aug)): - save_to_tfrecord(image_aug, label_aug, idx, im_id, output_path, - convert_label_to_1hot=True) - else: # not crop_liver_region - # If we output the entire CT scan (crop_liver_region=False), - # do not convert_label_to_1hot to save storage. - for idx, (image_aug, label_aug) in enumerate(rand_crop_whole_ct( - image, label, res_s, out_s, apply_data_aug)): - save_to_tfrecord(image_aug, label_aug, idx, im_id, output_path, - convert_label_to_1hot=False) - - -def main(argv): - del argv - - output_path = FLAGS.output_path - res_s = FLAGS.resize_size - out_s = FLAGS.output_size - crop_liver_region = FLAGS.crop_liver_region - apply_data_aug = FLAGS.apply_data_aug - - for im_id in range(FLAGS.shard_start, 1000000, FLAGS.shard_stride): - image_path = FLAGS.image_file_pattern.format(im_id) - label_path = FLAGS.label_file_pattern.format(im_id) - if not tf.gfile.Exists(image_path): - logging.info("Reached the end. Image does not exist: %s. " - "Process finish.", image_path) - break - process_one_file(image_path, label_path, im_id, - output_path, res_s, out_s, - crop_liver_region, apply_data_aug) - - -if __name__ == "__main__": - app.run(main) diff --git a/official/vision/segmentation/convert_lits_nii_to_npy.py b/official/vision/segmentation/convert_lits_nii_to_npy.py deleted file mode 100644 index 12fc2d2d0..000000000 --- a/official/vision/segmentation/convert_lits_nii_to_npy.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -r"""Converts .nii files in LiTS dataset to .npy files. - -This script should be run just once before running convert_lits.py. - -The file is forked from: -https://github.com/tensorflow/tpu/blob/master/models/official/unet3d/data_preprocess/convert_lits_nii_to_npy.py -""" - -from __future__ import absolute_import -from __future__ import division -# from __future__ import google_type_annotations -from __future__ import print_function - -import glob -import multiprocessing -import os - -import nibabel as nib -import numpy as np - - -num_processes = 2 -input_path = "Downloads/LiTS/Train/" # where the .nii files are. -output_path = "Downloads/LiTS/Train_np/" # where you want to put the npy files. 
- - -def process_one_file(image_path): - """Convert one nii file to npy.""" - im_id = os.path.basename(image_path).split("volume-")[1].split(".nii")[0] - label_path = image_path.replace("volume-", "segmentation-") - - image = nib.load(image_path).get_data().astype(np.float32) - label = nib.load(label_path).get_data().astype(np.float32) - - print("image shape: {}, dtype: {}".format(image.shape, image.dtype)) - print("label shape: {}, dtype: {}".format(label.shape, label.dtype)) - - np.save(os.path.join(output_path, "volume-{}.npy".format(im_id)), image) - np.save(os.path.join(output_path, "segmentation-{}.npy".format(im_id)), label) - - -nii_dir = os.path.join(input_path, "volume-*") -p = multiprocessing.Pool(num_processes) -p.map(process_one_file, glob.glob(nii_dir)) diff --git a/official/vision/segmentation/unet_config.py b/official/vision/segmentation/unet_config.py deleted file mode 100644 index bfaf83adc..000000000 --- a/official/vision/segmentation/unet_config.py +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Config to train UNet.""" - -from __future__ import absolute_import -from __future__ import division -# from __future__ import google_type_annotations -from __future__ import print_function - -UNET_CONFIG = { - # Place holder for tpu configs. - 'tpu_config': {}, - 'model_dir': '', - 'training_file_pattern': None, - 'eval_file_pattern': None, - # The input files are GZip compressed and need decompression. - 'compressed_input': True, - 'dtype': 'bfloat16', - 'label_dtype': 'float32', - 'train_batch_size': 8, - 'eval_batch_size': 8, - 'predict_batch_size': 8, - 'train_epochs': 20, - 'train_steps': 1000, - 'eval_steps': 10, - 'num_steps_per_eval': 100, - 'min_eval_interval': 180, - 'eval_timeout': None, - 'optimizer': 'adam', - 'momentum': 0.9, - # Spatial dimension of input image. - 'input_image_size': [128, 128, 128], - # Number of channels of the input image. - 'num_channels': 1, - # Spatial partition dimensions. - 'input_partition_dims': None, - # Use deconvolution to upsample, otherwise upsampling. - 'deconvolution': True, - # Number of areas i need to segment - 'num_classes': 3, - # Number of filters used by the architecture - 'num_base_filters': 32, - # Depth of the network - 'depth': 4, - # Dropout values to use across the network - 'dropout_rate': 0.5, - # Number of levels that contribute to the output. - 'num_segmentation_levels': 2, - # Use batch norm. - 'use_batch_norm': True, - 'init_learning_rate': 0.1, - # learning rate decay steps. - 'lr_decay_steps': 100, - # learning rate decay rate. - 'lr_decay_rate': 0.5, - # Data format, 'channels_last' and 'channels_first' - 'data_format': 'channels_last', - # Use class index for training. Otherwise, use one-hot encoding. - 'use_index_label_in_train': False, - # e.g. 
softmax cross entropy, adaptive_dice32 - 'loss': 'adaptive_dice32', -} - -UNET_RESTRICTIONS = [] diff --git a/official/vision/segmentation/unet_data.py b/official/vision/segmentation/unet_data.py deleted file mode 100644 index b3d6ed29a..000000000 --- a/official/vision/segmentation/unet_data.py +++ /dev/null @@ -1,175 +0,0 @@ -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -r"""Defines input_fn of TF2 UNet-3D model.""" - -from __future__ import absolute_import -from __future__ import division -# from __future__ import google_type_annotations -from __future__ import print_function - -import functools - -import tensorflow as tf - - -class BaseInput(object): - """Input function for 3D Unet model.""" - - def __init__(self, file_pattern, params, is_training): - self._params = params - self._file_pattern = file_pattern - self._is_training = is_training - self._parser_fn = self.create_parser_fn(params) - if params.compressed_input: - self._dataset_fn = functools.partial( - tf.data.TFRecordDataset, compression_type='GZIP') - else: - self._dataset_fn = tf.data.TFRecordDataset - - def create_parser_fn(self, params): - """Create parse fn to extract tensors from tf.Example.""" - - def _parser(serialized_example): - """Parses a single tf.Example into image and label tensors.""" - features = tf.io.parse_example( - serialized=[serialized_example], - features={ - 'image/encoded': tf.io.VarLenFeature(dtype=tf.float32), - 'image/segmentation/mask': tf.io.VarLenFeature(dtype=tf.float32), - }) - - image = features['image/encoded'] - if isinstance(image, tf.SparseTensor): - image = tf.sparse.to_dense(image) - - gt_mask = features['image/segmentation/mask'] - if isinstance(gt_mask, tf.SparseTensor): - gt_mask = tf.sparse.to_dense(gt_mask) - - image_size, label_size = self.get_input_shapes(params) - image = tf.reshape(image, image_size) - gt_mask = tf.reshape(gt_mask, label_size) - - image = tf.cast(image, dtype=params.dtype) - gt_mask = tf.cast(gt_mask, dtype=params.dtype) - return image, gt_mask - - return _parser - - def get_input_shapes(self, params): - image_size = params.input_image_size + [params.num_channels] - label_size = params.input_image_size + [params.num_classes] - return image_size, label_size - - def __call__(self, input_pipeline_context=None): - """Generates features and labels for training or evaluation. - - This uses the input pipeline based approach using file name queue - to read data so that entire data is not loaded in memory. - - Args: - input_pipeline_context: Context used by distribution strategy to - shard dataset across workers. - - Returns: - tf.data.Dataset - """ - params = self._params - batch_size = ( - params.train_batch_size - if self._is_training else params.eval_batch_size) - - dataset = tf.data.Dataset.list_files( - self._file_pattern, shuffle=self._is_training) - - # Shard dataset when there are more than 1 workers in training. 
- if input_pipeline_context: - batch_size = input_pipeline_context.get_per_replica_batch_size(batch_size) - - if input_pipeline_context.num_input_pipelines > 1: - dataset = dataset.shard(input_pipeline_context.num_input_pipelines, - input_pipeline_context.input_pipeline_id) - - if self._is_training: - dataset = dataset.repeat() - - dataset = dataset.apply( - tf.data.experimental.parallel_interleave( - lambda file_name: self._dataset_fn(file_name).prefetch(1), - cycle_length=32, - sloppy=self._is_training)) - - if self._is_training: - dataset = dataset.shuffle(64) - - # Parses the fetched records to input tensors for model function. - dataset = dataset.map(self._parser_fn, tf.data.experimental.AUTOTUNE) - dataset = dataset.batch(batch_size, drop_remainder=True) - dataset = dataset.prefetch(tf.data.experimental.AUTOTUNE) - return dataset - - -class LiverInput(BaseInput): - """Input function of Liver Segmentation data set.""" - - def create_parser_fn(self, params): - """Create parse fn to extract tensors from tf.Example.""" - - def _decode_liver_example(serialized_example): - """Parses a single tf.Example into image and label tensors.""" - features = {} - - features['image/ct_image'] = tf.io.FixedLenFeature([], tf.string) - features['image/label'] = tf.io.FixedLenFeature([], tf.string) - - parsed = tf.io.parse_single_example( - serialized=serialized_example, features=features) - - # Here, assumes the `image` is normalized to [0, 1] of type float32 and - # the `label` is a binary matrix, whose last dimension is one_hot encoded - # labels. - # The dtype of `label` can be either float32 or int64. - image = tf.io.decode_raw(parsed['image/ct_image'], - tf.as_dtype(tf.float32)) - label = tf.io.decode_raw(parsed['image/label'], - tf.as_dtype(params.label_dtype)) - - image_size = params.input_image_size + [params.num_channels] - image = tf.reshape(image, image_size) - label_size = params.input_image_size + [params.num_classes] - label = tf.reshape(label, label_size) - - if self._is_training and params.use_index_label_in_train: - # Use class index for labels and remove the channel dim (#channels=1). - channel_dim = -1 - label = tf.argmax(input=label, axis=channel_dim, output_type=tf.int32) - - image = tf.cast(image, dtype=params.dtype) - label = tf.cast(label, dtype=params.dtype) - - # TPU doesn't support tf.int64 well, use tf.int32 directly. - if label.dtype == tf.int64: - label = tf.cast(label, dtype=tf.int32) - return image, label - - return _decode_liver_example - - def get_input_shapes(self, params): - image_size = params.input_image_size + [params.num_channels] - if self._is_training and params.use_index_label_in_train: - label_size = params.input_image_size - else: - label_size = params.input_image_size + [params.num_classes] - return image_size, label_size diff --git a/official/vision/segmentation/unet_main.py b/official/vision/segmentation/unet_main.py deleted file mode 100644 index 4ba95b1aa..000000000 --- a/official/vision/segmentation/unet_main.py +++ /dev/null @@ -1,350 +0,0 @@ -# Copyright 2020 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -r"""Training script for UNet-3D.""" - -from __future__ import absolute_import -from __future__ import division -# from __future__ import google_type_annotations -from __future__ import print_function - -import functools -import os - -from absl import app -from absl import flags -import numpy as np -import tensorflow as tf - -from official.modeling.hyperparams import params_dict -from official.utils import hyperparams_flags -from official.utils.misc import distribution_utils -from official.utils.misc import keras_utils -from official.vision.segmentation import unet_config -from official.vision.segmentation import unet_data -from official.vision.segmentation import unet_metrics -from official.vision.segmentation import unet_model as unet_model_lib - - -def define_unet3d_flags(): - """Defines flags for training 3D Unet.""" - hyperparams_flags.initialize_common_flags() - - flags.DEFINE_enum( - 'distribution_strategy', 'tpu', ['tpu', 'mirrored'], - 'Distribution Strategy type to use for training. `tpu` uses TPUStrategy ' - 'for running on TPUs, `mirrored` uses GPUs with single host.') - flags.DEFINE_integer( - 'steps_per_loop', 50, - 'Number of steps to execute in a loop for performance optimization.') - flags.DEFINE_integer('checkpoint_interval', 100, - 'Minimum step interval between two checkpoints.') - flags.DEFINE_integer('epochs', 10, 'Number of epochs to run training.') - flags.DEFINE_string( - 'gcp_project', - default=None, - help='Project name for the Cloud TPU-enabled project. If not specified, we ' - 'will attempt to automatically detect the GCE project from metadata.') - flags.DEFINE_string( - 'eval_checkpoint_dir', - default=None, - help='Directory for reading checkpoint file when `mode` == `eval`.') - flags.DEFINE_multi_integer( - 'input_partition_dims', [1], - 'A list that describes the partition dims for all the tensors.') - flags.DEFINE_string( - 'mode', 'train', 'Mode to run: train or eval or train_and_eval ' - '(default: train)') - flags.DEFINE_string('training_file_pattern', None, - 'Location of the train data.') - flags.DEFINE_string('eval_file_pattern', None, 'Location of ther eval data') - flags.DEFINE_float('lr_init_value', 0.0001, 'Initial learning rate.') - flags.DEFINE_float('lr_decay_rate', 0.9, 'Learning rate decay rate.') - flags.DEFINE_integer('lr_decay_steps', 100, 'Learning rate decay steps.') - - -def save_params(params): - """Save parameters to config files if model_dir is defined.""" - model_dir = params.model_dir - assert model_dir is not None - if not tf.io.gfile.exists(model_dir): - tf.io.gfile.makedirs(model_dir) - file_name = os.path.join(model_dir, 'params.yaml') - params_dict.save_params_dict_to_yaml(params, file_name) - - -def extract_params(flags_obj): - """Extract configuration parameters for training and evaluation.""" - params = params_dict.ParamsDict(unet_config.UNET_CONFIG, - unet_config.UNET_RESTRICTIONS) - - params = params_dict.override_params_dict( - params, flags_obj.config_file, is_strict=False) - - if flags_obj.training_file_pattern: - params.override({'training_file_pattern': flags_obj.training_file_pattern}, - is_strict=True) - if flags_obj.eval_file_pattern: - params.override({'eval_file_pattern': flags_obj.eval_file_pattern}, - is_strict=True) - - train_epoch_steps = params.train_item_count // params.train_batch_size - eval_epoch_steps = 
params.eval_item_count // params.eval_batch_size - - params.override( - { - 'model_dir': flags_obj.model_dir, - 'eval_checkpoint_dir': flags_obj.eval_checkpoint_dir, - 'mode': flags_obj.mode, - 'distribution_strategy': flags_obj.distribution_strategy, - 'tpu': flags_obj.tpu, - 'num_gpus': flags_obj.num_gpus, - 'init_learning_rate': flags_obj.lr_init_value, - 'lr_decay_rate': flags_obj.lr_decay_rate, - 'lr_decay_steps': train_epoch_steps, - 'train_epoch_steps': train_epoch_steps, - 'eval_epoch_steps': eval_epoch_steps, - 'steps_per_loop': flags_obj.steps_per_loop, - 'epochs': flags_obj.epochs, - 'checkpoint_interval': flags_obj.checkpoint_interval, - }, - is_strict=False) - - params.validate() - params.lock() - return params - - -def unet3d_callbacks(params, checkpoint_manager=None): - """Custom callbacks during training.""" - tensorboard_callback = tf.keras.callbacks.TensorBoard( - log_dir=params.model_dir) - - if checkpoint_manager: - checkpoint_callback = keras_utils.SimpleCheckpoint(checkpoint_manager) - return [tensorboard_callback, checkpoint_callback] - else: - return [tensorboard_callback] - - -def get_computation_shape_for_model_parallelism(input_partition_dims): - """Return computation shape to be used for TPUStrategy spatial partition.""" - num_logical_devices = np.prod(input_partition_dims) - if num_logical_devices == 1: - return [1, 1, 1, 1] - if num_logical_devices == 2: - return [1, 1, 1, 2] - if num_logical_devices == 4: - return [1, 2, 1, 2] - if num_logical_devices == 8: - return [2, 2, 1, 2] - if num_logical_devices == 16: - return [4, 2, 1, 2] - - raise ValueError('Unsupported number of spatial partition configuration.') - - -def create_distribution_strategy(params): - """Creates distribution strategy to use for computation.""" - - if params.input_partition_dims is not None: - if params.distribution_strategy != 'tpu': - raise ValueError('Spatial partitioning is only supported ' - 'for TPUStrategy.') - - # When `input_partition_dims` is specified create custom TPUStrategy - # instance with computation shape for model parallelism. 
- resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu=params.tpu) - if params.tpu not in ('', 'local'): - tf.config.experimental_connect_to_cluster(resolver) - - topology = tf.tpu.experimental.initialize_tpu_system(resolver) - num_replicas = resolver.get_tpu_system_metadata().num_cores // np.prod( - params.input_partition_dims) - device_assignment = tf.tpu.experimental.DeviceAssignment.build( - topology, - num_replicas=num_replicas, - computation_shape=get_computation_shape_for_model_parallelism( - params.input_partition_dims)) - return tf.distribute.experimental.TPUStrategy( - resolver, device_assignment=device_assignment) - - return distribution_utils.get_distribution_strategy( - distribution_strategy=params.distribution_strategy, - tpu_address=params.tpu, - num_gpus=params.num_gpus) - - -def get_train_dataset(params, ctx=None): - """Returns training dataset.""" - return unet_data.LiverInput( - params.training_file_pattern, params, is_training=True)( - ctx) - - -def get_eval_dataset(params, ctx=None): - """Returns evaluation dataset.""" - return unet_data.LiverInput( - params.training_file_pattern, params, is_training=False)( - ctx) - - -def expand_1d(data): - """Expands 1-dimensional `Tensor`s into 2-dimensional `Tensor`s.""" - - def _expand_single_1d_tensor(t): - if (isinstance(t, tf.Tensor) and isinstance(t.shape, tf.TensorShape) and - t.shape.rank == 1): - return tf.expand_dims(t, axis=-1) - return t - - return tf.nest.map_structure(_expand_single_1d_tensor, data) - - -def train_step(train_fn, input_partition_dims, data): - """The logic for one training step with spatial partitioning.""" - # Keras expects rank 2 inputs. As so, expand single rank inputs. - data = expand_1d(data) - x, y, sample_weight = tf.keras.utils.unpack_x_y_sample_weight(data) - - if input_partition_dims: - strategy = tf.distribute.get_strategy() - x = strategy.experimental_split_to_logical_devices(x, input_partition_dims) - y = strategy.experimental_split_to_logical_devices(y, input_partition_dims) - - partitioned_data = tf.keras.utils.pack_x_y_sample_weight(x, y, sample_weight) - return train_fn(partitioned_data) - - -def test_step(test_fn, input_partition_dims, data): - """The logic for one testing step with spatial partitioning.""" - # Keras expects rank 2 inputs. As so, expand single rank inputs. - data = expand_1d(data) - x, y, sample_weight = tf.keras.utils.unpack_x_y_sample_weight(data) - - if input_partition_dims: - strategy = tf.distribute.get_strategy() - x = strategy.experimental_split_to_logical_devices(x, input_partition_dims) - y = strategy.experimental_split_to_logical_devices(y, input_partition_dims) - - partitioned_data = tf.keras.utils.pack_x_y_sample_weight(x, y, sample_weight) - return test_fn(partitioned_data) - - -def train(params, strategy, unet_model, train_input_fn, eval_input_fn): - """Trains 3D Unet model.""" - assert tf.distribute.has_strategy() - - # Override Keras Model's train_step() and test_step() function so - # that inputs are spatially partitioned. - # Note that is `predict()` API is used, then `predict_step()` should also - # be overriden. 
- unet_model.train_step = functools.partial(train_step, unet_model.train_step, - params.input_partition_dims) - unet_model.test_step = functools.partial(test_step, unet_model.test_step, - params.input_partition_dims) - - optimizer = unet_model_lib.create_optimizer(params.init_learning_rate, params) - loss_fn = unet_metrics.get_loss_fn(params.mode, params) - unet_model.compile( - loss=loss_fn, - optimizer=optimizer, - metrics=[unet_metrics.metric_accuracy], - experimental_steps_per_execution=params.steps_per_loop) - - train_ds = strategy.experimental_distribute_datasets_from_function( - train_input_fn) - eval_ds = strategy.experimental_distribute_datasets_from_function( - eval_input_fn) - - checkpoint = tf.train.Checkpoint(model=unet_model) - - train_epoch_steps = params.train_item_count // params.train_batch_size - eval_epoch_steps = params.eval_item_count // params.eval_batch_size - - checkpoint_manager = tf.train.CheckpointManager( - checkpoint, - directory=params.model_dir, - max_to_keep=10, - step_counter=unet_model.optimizer.iterations, - checkpoint_interval=params.checkpoint_interval) - checkpoint_manager.restore_or_initialize() - - train_result = unet_model.fit( - x=train_ds, - epochs=params.epochs, - steps_per_epoch=train_epoch_steps, - validation_data=eval_ds, - validation_steps=eval_epoch_steps, - callbacks=unet3d_callbacks(params, checkpoint_manager)) - return train_result - - -def evaluate(params, strategy, unet_model, input_fn): - """Reads from checkpoint and evaluate 3D Unet model.""" - assert tf.distribute.has_strategy() - - unet_model.compile( - metrics=[unet_metrics.metric_accuracy], - experimental_steps_per_execution=params.steps_per_loop) - - # Override test_step() function so that inputs are spatially partitioned. - unet_model.test_step = functools.partial(test_step, unet_model.test_step, - params.input_partition_dims) - - # Load checkpoint for evaluation. 
- checkpoint = tf.train.Checkpoint(model=unet_model) - checkpoint_path = tf.train.latest_checkpoint(params.eval_checkpoint_dir) - status = checkpoint.restore(checkpoint_path) - status.assert_existing_objects_matched() - - eval_ds = strategy.experimental_distribute_datasets_from_function(input_fn) - eval_epoch_steps = params.eval_item_count // params.eval_batch_size - - eval_result = unet_model.evaluate( - x=eval_ds, steps=eval_epoch_steps, callbacks=unet3d_callbacks(params)) - return eval_result - - -def main(_): - params = extract_params(flags.FLAGS) - assert params.mode in {'train', 'eval'}, 'only support train and eval' - save_params(params) - - input_dtype = params.dtype - if input_dtype == 'float16' or input_dtype == 'bfloat16': - policy = tf.keras.mixed_precision.experimental.Policy( - 'mixed_bfloat16' if input_dtype == 'bfloat16' else 'mixed_float16') - tf.keras.mixed_precision.experimental.set_policy(policy) - - strategy = create_distribution_strategy(params) - with strategy.scope(): - unet_model = unet_model_lib.build_unet_model(params) - - if params.mode == 'train': - train(params, strategy, unet_model, - functools.partial(get_train_dataset, params), - functools.partial(get_eval_dataset, params)) - - elif params.mode == 'eval': - evaluate(params, strategy, unet_model, - functools.partial(get_eval_dataset, params)) - - else: - raise Exception('Only `train` mode and `eval` mode are supported.') - - -if __name__ == '__main__': - define_unet3d_flags() - app.run(main) diff --git a/official/vision/segmentation/unet_main_test.py b/official/vision/segmentation/unet_main_test.py deleted file mode 100644 index 765d6d3e0..000000000 --- a/official/vision/segmentation/unet_main_test.py +++ /dev/null @@ -1,248 +0,0 @@ -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================== - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import os -import tempfile - -from absl import flags -from absl.testing import flagsaver -from absl.testing import parameterized -import numpy as np -import tensorflow as tf - -from tensorflow.contrib import cluster_resolver as contrib_cluster_resolver -from tensorflow.contrib.tpu.python.tpu import device_assignment as device_lib -from tensorflow.python.distribute import tpu_strategy as tpu_strategy_lib -from tensorflow.python.tpu import tpu_strategy_util -from official.modeling.hyperparams import params_dict -from official.vision.segmentation import unet_config -from official.vision.segmentation import unet_main as unet_main_lib -from official.vision.segmentation import unet_metrics -from official.vision.segmentation import unet_model as unet_model_lib - -FLAGS = flags.FLAGS - - -def create_fake_input_fn(params, - features_size, - labels_size, - use_bfloat16=False): - """Returns fake input function for testing.""" - - def fake_data_input_fn(unused_ctx=None): - """An input function for generating fake data.""" - batch_size = params.train_batch_size - features = np.random.rand(64, *features_size) - labels = np.random.randint(2, size=[64] + labels_size) - - # Convert the inputs to a Dataset. - dataset = tf.data.Dataset.from_tensor_slices((features, labels)) - - def _assign_dtype(features, labels): - if use_bfloat16: - features = tf.cast(features, tf.bfloat16) - labels = tf.cast(labels, tf.bfloat16) - else: - features = tf.cast(features, tf.float32) - labels = tf.cast(labels, tf.float32) - return features, labels - - # Shuffle, repeat, and batch the examples. - dataset = dataset.map(_assign_dtype) - dataset = dataset.shuffle(64).repeat() - dataset = dataset.batch(batch_size, drop_remainder=True) - dataset = dataset.prefetch(tf.data.experimental.AUTOTUNE) - - # Return the dataset. 
- return dataset - - return fake_data_input_fn - - -class UnetMainTest(parameterized.TestCase, tf.test.TestCase): - - def setUp(self): - super(UnetMainTest, self).setUp() - self._model_dir = os.path.join(tempfile.mkdtemp(), 'model_dir') - tf.io.gfile.makedirs(self._model_dir) - - def tearDown(self): - tf.io.gfile.rmtree(self._model_dir) - super(UnetMainTest, self).tearDown() - - @flagsaver.flagsaver - def testUnet3DModel(self): - FLAGS.tpu = '' - FLAGS.mode = 'train' - params = params_dict.ParamsDict(unet_config.UNET_CONFIG, - unet_config.UNET_RESTRICTIONS) - params.override( - { - 'input_image_size': [64, 64, 64], - 'train_item_count': 4, - 'eval_item_count': 4, - 'train_batch_size': 2, - 'eval_batch_size': 2, - 'batch_size': 2, - 'num_base_filters': 16, - 'dtype': 'bfloat16', - 'depth': 1, - 'train_steps': 2, - 'eval_steps': 2, - 'mode': FLAGS.mode, - 'tpu': FLAGS.tpu, - 'num_gpus': 0, - 'checkpoint_interval': 1, - 'use_tpu': True, - 'input_partition_dims': None, - }, - is_strict=False) - params.validate() - params.lock() - - image_size = params.input_image_size + [params.num_channels] - label_size = params.input_image_size + [params.num_classes] - input_fn = create_fake_input_fn( - params, features_size=image_size, labels_size=label_size) - - resolver = contrib_cluster_resolver.TPUClusterResolver(tpu=params.tpu) - topology = tpu_strategy_util.initialize_tpu_system(resolver) - device_assignment = None - - if params.input_partition_dims is not None: - assert np.prod( - params.input_partition_dims) == 2, 'invalid unit test configuration' - computation_shape = [1, 1, 1, 2] - partition_dimension = params.input_partition_dims - num_replicas = resolver.get_tpu_system_metadata().num_cores // np.prod( - partition_dimension) - device_assignment = device_lib.device_assignment( - topology, - computation_shape=computation_shape, - num_replicas=num_replicas) - - strategy = tpu_strategy_lib.TPUStrategy( - resolver, device_assignment=device_assignment) - - with strategy.scope(): - model = unet_model_lib.build_unet_model(params) - optimizer = unet_model_lib.create_optimizer(params.init_learning_rate, - params) - loss_fn = unet_metrics.get_loss_fn(params.mode, params) - model.compile(loss=loss_fn, optimizer=optimizer, metrics=[loss_fn]) - - eval_ds = input_fn() - iterator = iter(eval_ds) - - image, _ = next(iterator) - logits = model(image, training=False) - self.assertEqual(logits.shape[1:], params.input_image_size + [3]) - - @parameterized.parameters( - { - 'use_mlir': True, - 'dtype': 'bfloat16', - 'input_partition_dims': None, - }, { - 'use_mlir': False, - 'dtype': 'bfloat16', - 'input_partition_dims': None, - }, { - 'use_mlir': True, - 'dtype': 'bfloat16', - 'input_partition_dims': None, - }, { - 'use_mlir': False, - 'dtype': 'bfloat16', - 'input_partition_dims': None, - }, { - 'use_mlir': True, - 'dtype': 'bfloat16', - 'input_partition_dims': [1, 2, 1, 1, 1], - }, { - 'use_mlir': False, - 'dtype': 'bfloat16', - 'input_partition_dims': [1, 2, 1, 1, 1], - }, { - 'use_mlir': True, - 'dtype': 'bfloat16', - 'input_partition_dims': [1, 2, 1, 1, 1], - }, { - 'use_mlir': False, - 'dtype': 'bfloat16', - 'input_partition_dims': [1, 2, 1, 1, 1] - }) - @flagsaver.flagsaver - def testUnetTrain(self, use_mlir, dtype, input_partition_dims): - FLAGS.tpu = '' - FLAGS.mode = 'train' - - if use_mlir: - tf.config.experimental.enable_mlir_bridge() - - params = params_dict.ParamsDict(unet_config.UNET_CONFIG, - unet_config.UNET_RESTRICTIONS) - params.override( - { - 'model_dir': self._model_dir, - 'input_image_size': [8, 
8, 8], - 'train_item_count': 2, - 'eval_item_count': 2, - 'train_batch_size': 2, - 'eval_batch_size': 2, - 'batch_size': 2, - 'num_base_filters': 1, - 'dtype': 'bfloat16', - 'depth': 1, - 'epochs': 1, - 'checkpoint_interval': 1, - 'train_steps': 1, - 'eval_steps': 1, - 'mode': FLAGS.mode, - 'tpu': FLAGS.tpu, - 'use_tpu': True, - 'num_gpus': 0, - 'distribution_strategy': 'tpu', - 'steps_per_loop': 1, - 'input_partition_dims': input_partition_dims, - }, - is_strict=False) - params.validate() - params.lock() - - image_size = params.input_image_size + [params.num_channels] - label_size = params.input_image_size + [params.num_classes] - input_fn = create_fake_input_fn( - params, features_size=image_size, labels_size=label_size) - - input_dtype = params.dtype - if input_dtype == 'float16' or input_dtype == 'bfloat16': - policy = tf.keras.mixed_precision.experimental.Policy( - 'mixed_bfloat16' if input_dtype == 'bfloat16' else 'mixed_float16') - tf.keras.mixed_precision.experimental.set_policy(policy) - - strategy = unet_main_lib.create_distribution_strategy(params) - with strategy.scope(): - unet_model = unet_model_lib.build_unet_model(params) - unet_main_lib.train(params, strategy, unet_model, input_fn, input_fn) - - -if __name__ == '__main__': - unet_main_lib.define_unet3d_flags() - tf.test.main() diff --git a/official/vision/segmentation/unet_metrics.py b/official/vision/segmentation/unet_metrics.py deleted file mode 100644 index d7eb225aa..000000000 --- a/official/vision/segmentation/unet_metrics.py +++ /dev/null @@ -1,279 +0,0 @@ -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Define metrics for the UNet 3D Model.""" - -from __future__ import absolute_import -from __future__ import division -# from __future__ import google_type_annotations -from __future__ import print_function - -import tensorflow as tf - - -def dice(y_true, y_pred, axis=(1, 2, 3, 4)): - """DICE coefficient. - - Taha AA, Hanbury A. Metrics for evaluating 3D medical image segmentation: - analysis, selection, and tool. BMC Med Imaging. 2015;15:29. Published - 2015 - Aug 12. doi:10.1186/s12880-015-0068-x - - Implemented according to - https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4533825/#Equ6 - - Args: - y_true: the ground truth matrix. Shape [batch_size, x, y, z, num_classes]. - y_pred: the prediction matrix. Shape [batch_size, x, y, z, num_classes]. - axis: axises of features. - - Returns: - DICE coefficient. - """ - y_true = tf.cast(y_true, y_pred.dtype) - eps = tf.keras.backend.epsilon() - - intersection = tf.reduce_sum(input_tensor=y_true * y_pred, axis=axis) - summation = tf.reduce_sum( - input_tensor=y_true, axis=axis) + tf.reduce_sum( - input_tensor=y_pred, axis=axis) - return (2 * intersection + eps) / (summation + eps) - - -def generalized_dice(y_true, y_pred, axis=(1, 2, 3)): - """Generalized Dice coefficient, for multi-class predictions. 
- - For output of a multi-class model, where the shape of the output is - (batch, x, y, z, n_classes), the axis argument should be (1, 2, 3). - - Args: - y_true: the ground truth matrix. Shape [batch_size, x, y, z, num_classes]. - y_pred: the prediction matrix. Shape [batch_size, x, y, z, num_classes]. - axis: axises of features. - - Returns: - DICE coefficient. - """ - y_true = tf.cast(y_true, y_pred.dtype) - - if y_true.get_shape().ndims < 2 or y_pred.get_shape().ndims < 2: - raise ValueError('y_true and y_pred must be at least rank 2.') - - epsilon = tf.keras.backend.epsilon() - w = tf.math.reciprocal(tf.square(tf.reduce_sum(y_true, axis=axis)) + epsilon) - num = 2 * tf.reduce_sum( - w * tf.reduce_sum(y_true * y_pred, axis=axis), axis=-1) - den = tf.reduce_sum(w * tf.reduce_sum(y_true + y_pred, axis=axis), axis=-1) - return (num + epsilon) / (den + epsilon) - - -def hamming(y_true, y_pred, axis=(1, 2, 3)): - """Hamming distance. - - Args: - y_true: the ground truth matrix. Shape [batch_size, x, y, z]. - y_pred: the prediction matrix. Shape [batch_size, x, y, z]. - axis: a list, axises of the feature dimensions. - - Returns: - Hamming distance value. - """ - y_true = tf.cast(y_true, y_pred.dtype) - return tf.reduce_mean(input_tensor=tf.not_equal(y_pred, y_true), axis=axis) - - -def jaccard(y_true, y_pred, axis=(1, 2, 3, 4)): - """Jaccard Similarity. - - Taha AA, Hanbury A. Metrics for evaluating 3D medical image segmentation: - analysis, selection, and tool. BMC Med Imaging. 2015;15:29. Published - 2015 - Aug 12. doi:10.1186/s12880-015-0068-x - - Implemented according to - https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4533825/#Equ7 - - Args: - y_true: the ground truth matrix. Shape [batch_size, x, y, z, num_classes]. - y_pred: the prediction matrix. Shape [batch_size, x, y, z, num_classes]. - axis: axises of features. - - Returns: - Jaccard similarity. - """ - y_true = tf.cast(y_true, y_pred.dtype) - eps = tf.keras.backend.epsilon() - - intersection = tf.reduce_sum(input_tensor=y_true * y_pred, axis=axis) - union = tf.reduce_sum(y_true, axis=axis) + tf.reduce_sum(y_pred, axis=axis) - return (intersection + eps) / (union - intersection + eps) - - -def tversky(y_true, y_pred, axis=(1, 2, 3), alpha=0.3, beta=0.7): - """Tversky similarity. - - Args: - y_true: the ground truth matrix. Shape [batch_size, x, y, z, num_classes]. - y_pred: the prediction matrix. Shape [batch_size, x, y, z, num_classes]. - axis: axises of spatial dimensions. - alpha: weight of the prediction. - beta: weight of the groundtruth. - - Returns: - Tversky similarity coefficient. - """ - y_true = tf.cast(y_true, y_pred.dtype) - - if y_true.get_shape().ndims < 2 or y_pred.get_shape().ndims < 2: - raise ValueError('y_true and y_pred must be at least rank 2.') - - eps = tf.keras.backend.epsilon() - - num = tf.reduce_sum(input_tensor=y_pred * y_true, axis=axis) - den = ( - num + alpha * tf.reduce_sum(y_pred * (1 - y_true), axis=axis) + - beta * tf.reduce_sum((1 - y_pred) * y_true, axis=axis)) - # Sum over classes. - return tf.reduce_sum(input_tensor=(num + eps) / (den + eps), axis=-1) - - -def adaptive_dice32(y_true, y_pred, data_format='channels_last'): - """Adaptive dice metric. - - Args: - y_true: the ground truth matrix. Shape [batch_size, x, y, z, num_classes]. - y_pred: the prediction matrix. Shape [batch_size, x, y, z, num_classes]. - data_format: channel last of channel first. - - Returns: - Adaptive dice value. 
- """ - epsilon = 10**-7 - y_true = tf.cast(y_true, dtype=y_pred.dtype) - # Determine axes to pass to tf.reduce_sum - if data_format == 'channels_last': - ndim = len(y_pred.shape) - reduction_axes = list(range(ndim - 1)) - else: - reduction_axes = 1 - - # Calculate intersections and unions per class - intersections = tf.reduce_sum(y_true * y_pred, axis=reduction_axes) - unions = tf.reduce_sum(y_true + y_pred, axis=reduction_axes) - - # Calculate Dice scores per class - dice_scores = 2.0 * (intersections + epsilon) / (unions + epsilon) - - # Calculate weights based on Dice scores - weights = tf.exp(-1.0 * dice_scores) - - # Multiply weights by corresponding scores and get sum - weighted_dice = tf.reduce_sum(weights * dice_scores) - - # Calculate normalization factor - norm_factor = tf.size(input=dice_scores, out_type=tf.float32) * tf.exp(-1.0) - - weighted_dice = tf.cast(weighted_dice, dtype=tf.float32) - - # Return 1 - adaptive Dice score - return 1 - (weighted_dice / norm_factor) - - -def assert_shape_equal(pred_shape, label_shape): - """Asserts that `pred_shape` and `label_shape` is equal.""" - assert (label_shape == pred_shape - ), 'pred. shape {} is not equal to label shape {}'.format( - label_shape, pred_shape) - - -def get_loss_fn(mode, params): - """Return loss_fn for unet training. - - Args: - mode: training or eval. This is a legacy parameter from TF1. - params: unet configuration parameter. - - Returns: - loss_fn. - """ - - def loss_fn(y_true, y_pred): - """Returns scalar loss from labels and netowrk outputs.""" - loss = None - label_shape = y_true.get_shape().as_list() - pred_shape = y_pred.get_shape().as_list() - assert_shape_equal(label_shape, pred_shape) - if params.loss == 'adaptive_dice32': - loss = adaptive_dice32(y_true, y_pred) - elif params.loss == 'cross_entropy': - if mode == tf.estimator.ModeKeys.TRAIN and params.use_index_label_in_train: - labels_idx = tf.cast(y_true, dtype=tf.int32) - else: - # Use one-hot label representation, convert to label index. - labels_idx = tf.argmax(input=y_true, axis=-1, output_type=tf.int32) - y_pred = tf.cast(y_pred, dtype=tf.float32) - loss = tf.keras.losses.sparse_categorical_crossentropy( - labels_idx, y_pred, from_logits=False) - else: - raise Exception('Unexpected loss type') - return loss - - return loss_fn - - -def metric_accuracy(labels, predictions): - """Returns accuracy metric of model outputs. - - Args: - labels: ground truth tensor (labels). - predictions: network output (logits) - - Returns: - metric_fn. - """ - if labels.dtype == tf.bfloat16: - labels = tf.cast(labels, tf.float32) - if predictions.dtype == tf.bfloat16: - predictions = tf.cast(predictions, tf.float32) - return tf.keras.backend.mean( - tf.keras.backend.equal( - tf.argmax(input=labels, axis=-1), - tf.argmax(input=predictions, axis=-1))) - - -def metric_ce(labels, predictions): - """Returns categorical crossentropy given outputs and labels. - - Args: - labels: ground truth tensor (labels). - predictions: network output (logits) - - Returns: - metric_fn. 
- """ - if labels.dtype == tf.bfloat16: - labels = tf.cast(labels, tf.float32) - if predictions.dtype == tf.bfloat16: - predictions = tf.cast(predictions, tf.float32) - return tf.keras.losses.categorical_crossentropy( - labels, predictions, from_logits=False) - - -def metric_dice(labels, predictions): - """Returns adaptive dice coefficient.""" - if labels.dtype == tf.bfloat16: - labels = tf.cast(labels, tf.float32) - if predictions.dtype == tf.bfloat16: - predictions = tf.cast(predictions, tf.float32) - return adaptive_dice32(labels, predictions) diff --git a/official/vision/segmentation/unet_model.py b/official/vision/segmentation/unet_model.py deleted file mode 100644 index 9d13fc9eb..000000000 --- a/official/vision/segmentation/unet_model.py +++ /dev/null @@ -1,238 +0,0 @@ -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Model definition for the TF2 Keras UNet 3D Model.""" - -from __future__ import absolute_import -from __future__ import division -# from __future__ import google_type_annotations -from __future__ import print_function - -import tensorflow as tf - - -def create_optimizer(init_learning_rate, params): - """Creates optimizer for training.""" - learning_rate = tf.keras.optimizers.schedules.ExponentialDecay( - initial_learning_rate=init_learning_rate, - decay_steps=params.lr_decay_steps, - decay_rate=params.lr_decay_rate) - - # TODO(hongjunchoi): Provide alternative optimizer options depending on model - # config parameters. - optimizer = tf.keras.optimizers.Adam(learning_rate) - return optimizer - - -def create_convolution_block(input_layer, - n_filters, - batch_normalization=False, - kernel=(3, 3, 3), - activation=tf.nn.relu, - padding='SAME', - strides=(1, 1, 1), - data_format='channels_last', - instance_normalization=False): - """UNet convolution block. - - Args: - input_layer: tf.Tensor, the input tensor. - n_filters: integer, the number of the output channels of the convolution. - batch_normalization: boolean, use batch normalization after the convolution. - kernel: kernel size of the convolution. - activation: Tensorflow activation layer to use. (default is 'relu') - padding: padding type of the convolution. - strides: strides of the convolution. - data_format: data format of the convolution. One of 'channels_first' or - 'channels_last'. - instance_normalization: use Instance normalization. Exclusive with batch - normalization. - - Returns: - The Tensor after apply the convolution block to the input. - """ - assert instance_normalization == 0, 'TF 2.0 does not support inst. norm.' 
- layer = tf.keras.layers.Conv3D( - filters=n_filters, - kernel_size=kernel, - strides=strides, - padding=padding, - data_format=data_format, - activation=None, - )( - inputs=input_layer) - if batch_normalization: - layer = tf.keras.layers.BatchNormalization(axis=1)(inputs=layer) - return activation(layer) - - -def apply_up_convolution(inputs, - num_filters, - pool_size, - kernel_size=(2, 2, 2), - strides=(2, 2, 2), - deconvolution=False): - """Apply up convolution on inputs. - - Args: - inputs: input feature tensor. - num_filters: number of deconvolution output feature channels. - pool_size: pool size of the up-scaling. - kernel_size: kernel size of the deconvolution. - strides: strides of the deconvolution. - deconvolution: Use deconvolution or upsampling. - - Returns: - The tensor of the up-scaled features. - """ - if deconvolution: - return tf.keras.layers.Conv3DTranspose( - filters=num_filters, kernel_size=kernel_size, strides=strides)( - inputs=inputs) - else: - return tf.keras.layers.UpSampling3D(size=pool_size)(inputs) - - -def unet3d_base(input_layer, - pool_size=(2, 2, 2), - n_labels=1, - deconvolution=False, - depth=4, - n_base_filters=32, - batch_normalization=False, - data_format='channels_last'): - """Builds the 3D UNet Tensorflow model and return the last layer logits. - - Args: - input_layer: the input Tensor. - pool_size: Pool size for the max pooling operations. - n_labels: Number of binary labels that the model is learning. - deconvolution: If set to True, will use transpose convolution(deconvolution) - instead of up-sampling. This increases the amount memory required during - training. - depth: indicates the depth of the U-shape for the model. The greater the - depth, the more max pooling layers will be added to the model. Lowering - the depth may reduce the amount of memory required for training. - n_base_filters: The number of filters that the first layer in the - convolution network will have. Following layers will contain a multiple of - this number. Lowering this number will likely reduce the amount of memory - required to train the model. - batch_normalization: boolean. True for use batch normalization after - convolution and before activation. - data_format: string, channel_last (default) or channel_first - - Returns: - The last layer logits of 3D UNet. 
- """ - levels = [] - current_layer = input_layer - if data_format == 'channels_last': - channel_dim = -1 - else: - channel_dim = 1 - - # add levels with max pooling - for layer_depth in range(depth): - layer1 = create_convolution_block( - input_layer=current_layer, - n_filters=n_base_filters * (2**layer_depth), - batch_normalization=batch_normalization, - kernel=(3, 3, 3), - activation=tf.nn.relu, - padding='SAME', - strides=(1, 1, 1), - data_format=data_format, - instance_normalization=False) - layer2 = create_convolution_block( - input_layer=layer1, - n_filters=n_base_filters * (2**layer_depth) * 2, - batch_normalization=batch_normalization, - kernel=(3, 3, 3), - activation=tf.nn.relu, - padding='SAME', - strides=(1, 1, 1), - data_format=data_format, - instance_normalization=False) - if layer_depth < depth - 1: - current_layer = tf.keras.layers.MaxPool3D( - pool_size=pool_size, - strides=(2, 2, 2), - padding='VALID', - data_format=data_format)( - inputs=layer2) - levels.append([layer1, layer2, current_layer]) - else: - current_layer = layer2 - levels.append([layer1, layer2]) - - # add levels with up-convolution or up-sampling - for layer_depth in range(depth - 2, -1, -1): - up_convolution = apply_up_convolution( - current_layer, - pool_size=pool_size, - deconvolution=deconvolution, - num_filters=current_layer.get_shape().as_list()[channel_dim]) - concat = tf.concat([up_convolution, levels[layer_depth][1]], - axis=channel_dim) - current_layer = create_convolution_block( - n_filters=levels[layer_depth][1].get_shape().as_list()[channel_dim], - input_layer=concat, - batch_normalization=batch_normalization, - kernel=(3, 3, 3), - activation=tf.nn.relu, - padding='SAME', - strides=(1, 1, 1), - data_format=data_format, - instance_normalization=False) - current_layer = create_convolution_block( - n_filters=levels[layer_depth][1].get_shape().as_list()[channel_dim], - input_layer=current_layer, - batch_normalization=batch_normalization, - kernel=(3, 3, 3), - activation=tf.nn.relu, - padding='SAME', - strides=(1, 1, 1), - data_format=data_format, - instance_normalization=False) - - final_convolution = tf.keras.layers.Conv3D( - filters=n_labels, - kernel_size=(1, 1, 1), - padding='VALID', - data_format=data_format, - activation=None)( - current_layer) - return final_convolution - - -def build_unet_model(params): - """Builds the unet model, optimizer included.""" - input_shape = params.input_image_size + [1] - input_layer = tf.keras.layers.Input(shape=input_shape) - - logits = unet3d_base( - input_layer, - pool_size=(2, 2, 2), - n_labels=params.num_classes, - deconvolution=params.deconvolution, - depth=params.depth, - n_base_filters=params.num_base_filters, - batch_normalization=params.use_batch_norm, - data_format=params.data_format) - - # Set output of softmax to float32 to avoid potential numerical overflow. - predictions = tf.keras.layers.Softmax(dtype='float32')(logits) - model = tf.keras.models.Model(inputs=input_layer, outputs=predictions) - model.optimizer = create_optimizer(params.init_learning_rate, params) - return model -- GitLab From a64bc1bd8fca3434327ca82d9271a467aecae2c0 Mon Sep 17 00:00:00 2001 From: "A. Unique TensorFlower" Date: Wed, 24 Jun 2020 17:26:34 -0700 Subject: [PATCH 60/79] Introducing SpineNet backbone to TF2. 
PiperOrigin-RevId: 318174007 --- official/README.md | 6 +- official/vision/detection/README.md | 34 -- .../vision/detection/configs/base_config.py | 7 +- .../modeling/architecture/factory.py | 6 +- .../modeling/architecture/nn_blocks.py | 318 ----------- .../modeling/architecture/spinenet.py | 503 ------------------ 6 files changed, 4 insertions(+), 870 deletions(-) delete mode 100644 official/vision/detection/modeling/architecture/nn_blocks.py delete mode 100644 official/vision/detection/modeling/architecture/spinenet.py diff --git a/official/README.md b/official/README.md index d0e2ef81b..2b3f2dd76 100644 --- a/official/README.md +++ b/official/README.md @@ -19,10 +19,9 @@ In the near future, we will add: * State-of-the-art language understanding models: More members in Transformer family -* State-of-the-art image classification models: +* Start-of-the-art image classification models: EfficientNet, MnasNet, and variants -* State-of-the-art objection detection and instance segmentation models: - RetinaNet, Mask R-CNN, SpineNet, and variants +* A set of excellent objection detection models. ## Table of Contents @@ -53,7 +52,6 @@ In the near future, we will add: | [RetinaNet](vision/detection) | [Focal Loss for Dense Object Detection](https://arxiv.org/abs/1708.02002) | | [Mask R-CNN](vision/detection) | [Mask R-CNN](https://arxiv.org/abs/1703.06870) | | [ShapeMask](vision/detection) | [ShapeMask: Learning to Segment Novel Objects by Refining Shape Priors](https://arxiv.org/abs/1904.03239) | -| [SpineNet](vision/detection) | [SpineNet: Learning Scale-Permuted Backbone for Recognition and Localization](https://arxiv.org/abs/1912.05027) | ### Natural Language Processing diff --git a/official/vision/detection/README.md b/official/vision/detection/README.md index d6cb5d464..53134ec55 100644 --- a/official/vision/detection/README.md +++ b/official/vision/detection/README.md @@ -48,22 +48,6 @@ so the checkpoints are not compatible. We will unify the implementation soon. -### Train a SpineNet-49 based RetinaNet. - -```bash -TPU_NAME="" -MODEL_DIR="" -TRAIN_FILE_PATTERN="" -EVAL_FILE_PATTERN="" -VAL_JSON_FILE="" -python3 ~/models/official/vision/detection/main.py \ - --strategy_type=tpu \ - --tpu="${TPU_NAME?}" \ - --model_dir="${MODEL_DIR?}" \ - --mode=train \ - --params_override="{ type: retinanet, architecture: {backbone: spinenet, multilevel_features: identity}, spinenet: {model_id: 49}, train_file_pattern: ${TRAIN_FILE_PATTERN?} }, eval: { val_json_file: ${VAL_JSON_FILE?}, eval_file_pattern: ${EVAL_FILE_PATTERN?} } }" -``` - ### Train a custom RetinaNet using the config file. @@ -179,24 +163,6 @@ so the checkpoints are not compatible. We will unify the implementation soon. -### Train a SpineNet-49 based Mask R-CNN. - -```bash -TPU_NAME="" -MODEL_DIR="" -TRAIN_FILE_PATTERN="" -EVAL_FILE_PATTERN="" -VAL_JSON_FILE="" -python3 ~/models/official/vision/detection/main.py \ - --strategy_type=tpu \ - --tpu="${TPU_NAME?}" \ - --model_dir="${MODEL_DIR?}" \ - --mode=train \ - --model=mask_rcnn \ - --params_override="{architecture: {backbone: spinenet, multilevel_features: identity}, spinenet: {model_id: 49}, train_file_pattern: ${TRAIN_FILE_PATTERN?} }, eval: { val_json_file: ${VAL_JSON_FILE?}, eval_file_pattern: ${EVAL_FILE_PATTERN?} } }" -``` - - ### Train a custom Mask R-CNN using the config file. First, create a YAML config file, e.g. *my_maskrcnn.yaml*. 
diff --git a/official/vision/detection/configs/base_config.py b/official/vision/detection/configs/base_config.py index 6e2859e71..0a4e2f5fb 100644 --- a/official/vision/detection/configs/base_config.py +++ b/official/vision/detection/configs/base_config.py @@ -1,4 +1,4 @@ -# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -17,12 +17,10 @@ BACKBONES = [ 'resnet', - 'spinenet', ] MULTILEVEL_FEATURES = [ 'fpn', - 'identity', ] # pylint: disable=line-too-long @@ -120,9 +118,6 @@ BASE_CFG = { 'resnet': { 'resnet_depth': 50, }, - 'spinenet': { - 'model_id': '49', - }, 'fpn': { 'fpn_feat_dims': 256, 'use_separable_conv': False, diff --git a/official/vision/detection/modeling/architecture/factory.py b/official/vision/detection/modeling/architecture/factory.py index fa563c9ed..ed5647d6f 100644 --- a/official/vision/detection/modeling/architecture/factory.py +++ b/official/vision/detection/modeling/architecture/factory.py @@ -1,4 +1,4 @@ -# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -23,7 +23,6 @@ from official.vision.detection.modeling.architecture import heads from official.vision.detection.modeling.architecture import identity from official.vision.detection.modeling.architecture import nn_ops from official.vision.detection.modeling.architecture import resnet -from official.vision.detection.modeling.architecture import spinenet def norm_activation_generator(params): @@ -43,9 +42,6 @@ def backbone_generator(params): activation=params.norm_activation.activation, norm_activation=norm_activation_generator( params.norm_activation)) - elif params.architecture.backbone == 'spinenet': - spinenet_params = params.spinenet - backbone_fn = spinenet.SpineNetBuilder(model_id=spinenet_params.model_id) else: raise ValueError('Backbone model `{}` is not supported.' .format(params.architecture.backbone)) diff --git a/official/vision/detection/modeling/architecture/nn_blocks.py b/official/vision/detection/modeling/architecture/nn_blocks.py deleted file mode 100644 index c94a079f9..000000000 --- a/official/vision/detection/modeling/architecture/nn_blocks.py +++ /dev/null @@ -1,318 +0,0 @@ -# Copyright 2020 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================== -"""Contains common building blocks for neural networks.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import tensorflow as tf - -from official.modeling import tf_utils - - -@tf.keras.utils.register_keras_serializable(package='Vision') -class ResidualBlock(tf.keras.layers.Layer): - """A residual block.""" - - def __init__(self, - filters, - strides, - use_projection=False, - kernel_initializer='VarianceScaling', - kernel_regularizer=None, - bias_regularizer=None, - activation='relu', - use_sync_bn=False, - norm_momentum=0.99, - norm_epsilon=0.001, - **kwargs): - """A residual block with BN after convolutions. - - Args: - filters: `int` number of filters for the first two convolutions. Note that - the third and final convolution will use 4 times as many filters. - strides: `int` block stride. If greater than 1, this block will ultimately - downsample the input. - use_projection: `bool` for whether this block should use a projection - shortcut (versus the default identity shortcut). This is usually `True` - for the first block of a block group, which may change the number of - filters and the resolution. - kernel_initializer: kernel_initializer for convolutional layers. - kernel_regularizer: tf.keras.regularizers.Regularizer object for Conv2D. - Default to None. - bias_regularizer: tf.keras.regularizers.Regularizer object for Conv2d. - Default to None. - activation: `str` name of the activation function. - use_sync_bn: if True, use synchronized batch normalization. - norm_momentum: `float` normalization omentum for the moving average. - norm_epsilon: `float` small float added to variance to avoid dividing by - zero. - **kwargs: keyword arguments to be passed. 
- """ - super(ResidualBlock, self).__init__(**kwargs) - - self._filters = filters - self._strides = strides - self._use_projection = use_projection - self._use_sync_bn = use_sync_bn - self._activation = activation - self._kernel_initializer = kernel_initializer - self._norm_momentum = norm_momentum - self._norm_epsilon = norm_epsilon - self._kernel_regularizer = kernel_regularizer - self._bias_regularizer = bias_regularizer - - if use_sync_bn: - self._norm = tf.keras.layers.experimental.SyncBatchNormalization - else: - self._norm = tf.keras.layers.BatchNormalization - if tf.keras.backend.image_data_format() == 'channels_last': - self._bn_axis = -1 - else: - self._bn_axis = 1 - self._activation_fn = tf_utils.get_activation(activation) - - def build(self, input_shape): - if self._use_projection: - self._shortcut = tf.keras.layers.Conv2D( - filters=self._filters, - kernel_size=1, - strides=self._strides, - use_bias=False, - kernel_initializer=self._kernel_initializer, - kernel_regularizer=self._kernel_regularizer, - bias_regularizer=self._bias_regularizer) - self._norm0 = self._norm( - axis=self._bn_axis, - momentum=self._norm_momentum, - epsilon=self._norm_epsilon) - - self._conv1 = tf.keras.layers.Conv2D( - filters=self._filters, - kernel_size=3, - strides=self._strides, - padding='same', - use_bias=False, - kernel_initializer=self._kernel_initializer, - kernel_regularizer=self._kernel_regularizer, - bias_regularizer=self._bias_regularizer) - self._norm1 = self._norm( - axis=self._bn_axis, - momentum=self._norm_momentum, - epsilon=self._norm_epsilon) - - self._conv2 = tf.keras.layers.Conv2D( - filters=self._filters, - kernel_size=3, - strides=1, - padding='same', - use_bias=False, - kernel_initializer=self._kernel_initializer, - kernel_regularizer=self._kernel_regularizer, - bias_regularizer=self._bias_regularizer) - self._norm2 = self._norm( - axis=self._bn_axis, - momentum=self._norm_momentum, - epsilon=self._norm_epsilon) - - super(ResidualBlock, self).build(input_shape) - - def get_config(self): - config = { - 'filters': self._filters, - 'strides': self._strides, - 'use_projection': self._use_projection, - 'kernel_initializer': self._kernel_initializer, - 'kernel_regularizer': self._kernel_regularizer, - 'bias_regularizer': self._bias_regularizer, - 'activation': self._activation, - 'use_sync_bn': self._use_sync_bn, - 'norm_momentum': self._norm_momentum, - 'norm_epsilon': self._norm_epsilon - } - - base_config = super(ResidualBlock, self).get_config() - return dict(list(base_config.items()) + list(config.items())) - - def call(self, inputs): - shortcut = inputs - if self._use_projection: - shortcut = self._shortcut(shortcut) - shortcut = self._norm0(shortcut) - - x = self._conv1(inputs) - x = self._norm1(x) - x = self._activation_fn(x) - - x = self._conv2(x) - x = self._norm2(x) - - return self._activation_fn(x + shortcut) - - -@tf.keras.utils.register_keras_serializable(package='Vision') -class BottleneckBlock(tf.keras.layers.Layer): - """A standard bottleneck block.""" - - def __init__(self, - filters, - strides, - use_projection=False, - kernel_initializer='VarianceScaling', - kernel_regularizer=None, - bias_regularizer=None, - activation='relu', - use_sync_bn=False, - norm_momentum=0.99, - norm_epsilon=0.001, - **kwargs): - """A standard bottleneck block with BN after convolutions. - - Args: - filters: `int` number of filters for the first two convolutions. Note that - the third and final convolution will use 4 times as many filters. - strides: `int` block stride. 
If greater than 1, this block will ultimately - downsample the input. - use_projection: `bool` for whether this block should use a projection - shortcut (versus the default identity shortcut). This is usually `True` - for the first block of a block group, which may change the number of - filters and the resolution. - kernel_initializer: kernel_initializer for convolutional layers. - kernel_regularizer: tf.keras.regularizers.Regularizer object for Conv2D. - Default to None. - bias_regularizer: tf.keras.regularizers.Regularizer object for Conv2d. - Default to None. - activation: `str` name of the activation function. - use_sync_bn: if True, use synchronized batch normalization. - norm_momentum: `float` normalization omentum for the moving average. - norm_epsilon: `float` small float added to variance to avoid dividing by - zero. - **kwargs: keyword arguments to be passed. - """ - super(BottleneckBlock, self).__init__(**kwargs) - - self._filters = filters - self._strides = strides - self._use_projection = use_projection - self._use_sync_bn = use_sync_bn - self._activation = activation - self._kernel_initializer = kernel_initializer - self._norm_momentum = norm_momentum - self._norm_epsilon = norm_epsilon - self._kernel_regularizer = kernel_regularizer - self._bias_regularizer = bias_regularizer - if use_sync_bn: - self._norm = tf.keras.layers.experimental.SyncBatchNormalization - else: - self._norm = tf.keras.layers.BatchNormalization - if tf.keras.backend.image_data_format() == 'channels_last': - self._bn_axis = -1 - else: - self._bn_axis = 1 - self._activation_fn = tf_utils.get_activation(activation) - - def build(self, input_shape): - if self._use_projection: - self._shortcut = tf.keras.layers.Conv2D( - filters=self._filters * 4, - kernel_size=1, - strides=self._strides, - use_bias=False, - kernel_initializer=self._kernel_initializer, - kernel_regularizer=self._kernel_regularizer, - bias_regularizer=self._bias_regularizer) - self._norm0 = self._norm( - axis=self._bn_axis, - momentum=self._norm_momentum, - epsilon=self._norm_epsilon) - - self._conv1 = tf.keras.layers.Conv2D( - filters=self._filters, - kernel_size=1, - strides=1, - use_bias=False, - kernel_initializer=self._kernel_initializer, - kernel_regularizer=self._kernel_regularizer, - bias_regularizer=self._bias_regularizer) - self._norm1 = self._norm( - axis=self._bn_axis, - momentum=self._norm_momentum, - epsilon=self._norm_epsilon) - - self._conv2 = tf.keras.layers.Conv2D( - filters=self._filters, - kernel_size=3, - strides=self._strides, - padding='same', - use_bias=False, - kernel_initializer=self._kernel_initializer, - kernel_regularizer=self._kernel_regularizer, - bias_regularizer=self._bias_regularizer) - self._norm2 = self._norm( - axis=self._bn_axis, - momentum=self._norm_momentum, - epsilon=self._norm_epsilon) - - self._conv3 = tf.keras.layers.Conv2D( - filters=self._filters * 4, - kernel_size=1, - strides=1, - use_bias=False, - kernel_initializer=self._kernel_initializer, - kernel_regularizer=self._kernel_regularizer, - bias_regularizer=self._bias_regularizer) - self._norm3 = self._norm( - axis=self._bn_axis, - momentum=self._norm_momentum, - epsilon=self._norm_epsilon) - - super(BottleneckBlock, self).build(input_shape) - - def get_config(self): - config = { - 'filters': self._filters, - 'strides': self._strides, - 'use_projection': self._use_projection, - 'kernel_initializer': self._kernel_initializer, - 'kernel_regularizer': self._kernel_regularizer, - 'bias_regularizer': self._bias_regularizer, - 'activation': 
self._activation, - 'use_sync_bn': self._use_sync_bn, - 'norm_momentum': self._norm_momentum, - 'norm_epsilon': self._norm_epsilon - } - - base_config = super(BottleneckBlock, self).get_config() - return dict(list(base_config.items()) + list(config.items())) - - def call(self, inputs): - shortcut = inputs - if self._use_projection: - shortcut = self._shortcut(shortcut) - shortcut = self._norm0(shortcut) - - x = self._conv1(inputs) - x = self._norm1(x) - x = self._activation_fn(x) - - x = self._conv2(x) - x = self._norm2(x) - x = self._activation_fn(x) - - x = self._conv3(x) - x = self._norm3(x) - - return self._activation_fn(x + shortcut) diff --git a/official/vision/detection/modeling/architecture/spinenet.py b/official/vision/detection/modeling/architecture/spinenet.py deleted file mode 100644 index 152ed24af..000000000 --- a/official/vision/detection/modeling/architecture/spinenet.py +++ /dev/null @@ -1,503 +0,0 @@ -# Lint as: python3 -# Copyright 2020 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Implementation of SpineNet model. - -X. Du, T-Y. Lin, P. Jin, G. Ghiasi, M. Tan, Y. Cui, Q. V. Le, X. Song -SpineNet: Learning Scale-Permuted Backbone for Recognition and Localization -https://arxiv.org/abs/1912.05027 -""" -import math - -from absl import logging -import tensorflow as tf - -from tensorflow.python.keras import backend -from official.modeling import tf_utils -from official.vision.detection.modeling.architecture import nn_blocks - -layers = tf.keras.layers - -FILTER_SIZE_MAP = { - 1: 32, - 2: 64, - 3: 128, - 4: 256, - 5: 256, - 6: 256, - 7: 256, -} - -# The fixed SpineNet architecture discovered by NAS. -# Each element represents a specification of a building block: -# (block_level, block_fn, (input_offset0, input_offset1), is_output). 
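# For example, the first entry below, (2, 'bottleneck', (0, 1), False), describes
# a level-2 bottleneck block whose two inputs are the blocks at offsets 0 and 1
# (the two stem blocks) and which is not exported as an output feature; entries
# with is_output=True become the endpoint levels returned by the backbone.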
-SPINENET_BLOCK_SPECS = [ - (2, 'bottleneck', (0, 1), False), - (4, 'residual', (0, 1), False), - (3, 'bottleneck', (2, 3), False), - (4, 'bottleneck', (2, 4), False), - (6, 'residual', (3, 5), False), - (4, 'bottleneck', (3, 5), False), - (5, 'residual', (6, 7), False), - (7, 'residual', (6, 8), False), - (5, 'bottleneck', (8, 9), False), - (5, 'bottleneck', (8, 10), False), - (4, 'bottleneck', (5, 10), True), - (3, 'bottleneck', (4, 10), True), - (5, 'bottleneck', (7, 12), True), - (7, 'bottleneck', (5, 14), True), - (6, 'bottleneck', (12, 14), True), -] - -SCALING_MAP = { - '49S': { - 'endpoints_num_filters': 128, - 'filter_size_scale': 0.65, - 'resample_alpha': 0.5, - 'block_repeats': 1, - }, - '49': { - 'endpoints_num_filters': 256, - 'filter_size_scale': 1.0, - 'resample_alpha': 0.5, - 'block_repeats': 1, - }, - '96': { - 'endpoints_num_filters': 256, - 'filter_size_scale': 1.0, - 'resample_alpha': 0.5, - 'block_repeats': 2, - }, - '143': { - 'endpoints_num_filters': 256, - 'filter_size_scale': 1.0, - 'resample_alpha': 1.0, - 'block_repeats': 3, - }, - '190': { - 'endpoints_num_filters': 512, - 'filter_size_scale': 1.3, - 'resample_alpha': 1.0, - 'block_repeats': 4, - }, -} - - -class BlockSpec(object): - """A container class that specifies the block configuration for SpineNet.""" - - def __init__(self, level, block_fn, input_offsets, is_output): - self.level = level - self.block_fn = block_fn - self.input_offsets = input_offsets - self.is_output = is_output - - -def build_block_specs(block_specs=None): - """Builds the list of BlockSpec objects for SpineNet.""" - if not block_specs: - block_specs = SPINENET_BLOCK_SPECS - logging.info('Building SpineNet block specs: %s', block_specs) - return [BlockSpec(*b) for b in block_specs] - - -@tf.keras.utils.register_keras_serializable(package='Vision') -class SpineNet(tf.keras.Model): - """Class to build SpineNet models.""" - - def __init__(self, - input_specs=tf.keras.layers.InputSpec(shape=[None, 640, 640, 3]), - min_level=3, - max_level=7, - block_specs=build_block_specs(), - endpoints_num_filters=256, - resample_alpha=0.5, - block_repeats=1, - filter_size_scale=1.0, - kernel_initializer='VarianceScaling', - kernel_regularizer=None, - bias_regularizer=None, - activation='relu', - use_sync_bn=False, - norm_momentum=0.99, - norm_epsilon=0.001, - **kwargs): - """SpineNet model.""" - self._min_level = min_level - self._max_level = max_level - self._block_specs = block_specs - self._endpoints_num_filters = endpoints_num_filters - self._resample_alpha = resample_alpha - self._block_repeats = block_repeats - self._filter_size_scale = filter_size_scale - self._kernel_initializer = kernel_initializer - self._kernel_regularizer = kernel_regularizer - self._bias_regularizer = bias_regularizer - self._use_sync_bn = use_sync_bn - self._norm_momentum = norm_momentum - self._norm_epsilon = norm_epsilon - if activation == 'relu': - self._activation = tf.nn.relu - elif activation == 'swish': - self._activation = tf.nn.swish - else: - raise ValueError('Activation {} not implemented.'.format(activation)) - self._init_block_fn = 'bottleneck' - self._num_init_blocks = 2 - - if use_sync_bn: - self._norm = layers.experimental.SyncBatchNormalization - else: - self._norm = layers.BatchNormalization - - if tf.keras.backend.image_data_format() == 'channels_last': - self._bn_axis = -1 - else: - self._bn_axis = 1 - - # Build SpineNet. 
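    # The functional model below is assembled in three stages: a ResNet-style
    # stem (_build_stem), the scale-permuted block graph described by
    # block_specs (_build_scale_permuted_network), and 1x1 convolutions that
    # project every output level to endpoints_num_filters channels
    # (_build_endpoints).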
- inputs = tf.keras.Input(shape=input_specs.shape[1:]) - - net = self._build_stem(inputs=inputs) - net = self._build_scale_permuted_network( - net=net, input_width=input_specs.shape[1]) - net = self._build_endpoints(net=net) - - super(SpineNet, self).__init__(inputs=inputs, outputs=net) - - def _block_group(self, - inputs, - filters, - strides, - block_fn_cand, - block_repeats=1, - name='block_group'): - """Creates one group of blocks for the SpineNet model.""" - block_fn_candidates = { - 'bottleneck': nn_blocks.BottleneckBlock, - 'residual': nn_blocks.ResidualBlock, - } - block_fn = block_fn_candidates[block_fn_cand] - _, _, _, num_filters = inputs.get_shape().as_list() - - if block_fn_cand == 'bottleneck': - use_projection = not (num_filters == (filters * 4) and strides == 1) - else: - use_projection = not (num_filters == filters and strides == 1) - - x = block_fn( - filters=filters, - strides=strides, - use_projection=use_projection, - kernel_initializer=self._kernel_initializer, - kernel_regularizer=self._kernel_regularizer, - bias_regularizer=self._bias_regularizer, - activation=self._activation, - use_sync_bn=self._use_sync_bn, - norm_momentum=self._norm_momentum, - norm_epsilon=self._norm_epsilon)( - inputs) - for _ in range(1, block_repeats): - x = block_fn( - filters=filters, - strides=1, - use_projection=False, - kernel_initializer=self._kernel_initializer, - kernel_regularizer=self._kernel_regularizer, - bias_regularizer=self._bias_regularizer, - activation=self._activation, - use_sync_bn=self._use_sync_bn, - norm_momentum=self._norm_momentum, - norm_epsilon=self._norm_epsilon)( - x) - return tf.identity(x, name=name) - - def _build_stem(self, inputs): - """Build SpineNet stem.""" - x = layers.Conv2D( - filters=64, - kernel_size=7, - strides=2, - use_bias=False, - padding='same', - kernel_initializer=self._kernel_initializer, - kernel_regularizer=self._kernel_regularizer, - bias_regularizer=self._bias_regularizer)( - inputs) - x = self._norm( - axis=self._bn_axis, - momentum=self._norm_momentum, - epsilon=self._norm_epsilon)( - x) - x = tf_utils.get_activation(self._activation)(x) - x = layers.MaxPool2D(pool_size=3, strides=2, padding='same')(x) - - net = [] - # Build the initial level 2 blocks. - for i in range(self._num_init_blocks): - x = self._block_group( - inputs=x, - filters=int(FILTER_SIZE_MAP[2] * self._filter_size_scale), - strides=1, - block_fn_cand=self._init_block_fn, - block_repeats=self._block_repeats, - name='stem_block_{}'.format(i + 1)) - net.append(x) - return net - - def _build_scale_permuted_network(self, - net, - input_width, - weighted_fusion=False): - """Build scale-permuted network.""" - net_sizes = [int(math.ceil(input_width / 2**2))] * len(net) - net_block_fns = [self._init_block_fn] * len(net) - num_outgoing_connections = [0] * len(net) - - endpoints = {} - for i, block_spec in enumerate(self._block_specs): - # Find out specs for the target block. - target_width = int(math.ceil(input_width / 2**block_spec.level)) - target_num_filters = int(FILTER_SIZE_MAP[block_spec.level] * - self._filter_size_scale) - target_block_fn = block_spec.block_fn - - # Resample then merge input0 and input1. 
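      # Each parent is brought to the target block's width and filter count by
      # _resample_with_alpha (defined below): a 1x1 convolution scales the
      # channel count by `alpha`, a strided 3x3 convolution plus max pooling
      # (or nearest-neighbor upsampling) matches the spatial resolution, and a
      # final 1x1 convolution matches the target filter count before the
      # parents are summed.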
- parents = [] - input0 = block_spec.input_offsets[0] - input1 = block_spec.input_offsets[1] - - x0 = self._resample_with_alpha( - inputs=net[input0], - input_width=net_sizes[input0], - input_block_fn=net_block_fns[input0], - target_width=target_width, - target_num_filters=target_num_filters, - target_block_fn=target_block_fn, - alpha=self._resample_alpha) - parents.append(x0) - num_outgoing_connections[input0] += 1 - - x1 = self._resample_with_alpha( - inputs=net[input1], - input_width=net_sizes[input1], - input_block_fn=net_block_fns[input1], - target_width=target_width, - target_num_filters=target_num_filters, - target_block_fn=target_block_fn, - alpha=self._resample_alpha) - parents.append(x1) - num_outgoing_connections[input1] += 1 - - # Merge 0 outdegree blocks to the output block. - if block_spec.is_output: - for j, (j_feat, - j_connections) in enumerate(zip(net, num_outgoing_connections)): - if j_connections == 0 and (j_feat.shape[2] == target_width and - j_feat.shape[3] == x0.shape[3]): - parents.append(j_feat) - num_outgoing_connections[j] += 1 - - # pylint: disable=g-direct-tensorflow-import - if weighted_fusion: - dtype = parents[0].dtype - parent_weights = [ - tf.nn.relu(tf.cast(tf.Variable(1.0, name='block{}_fusion{}'.format( - i, j)), dtype=dtype)) for j in range(len(parents))] - weights_sum = tf.add_n(parent_weights) - parents = [ - parents[i] * parent_weights[i] / (weights_sum + 0.0001) - for i in range(len(parents)) - ] - - # Fuse all parent nodes then build a new block. - x = tf_utils.get_activation(self._activation)(tf.add_n(parents)) - x = self._block_group( - inputs=x, - filters=target_num_filters, - strides=1, - block_fn_cand=target_block_fn, - block_repeats=self._block_repeats, - name='scale_permuted_block_{}'.format(i + 1)) - - net.append(x) - net_sizes.append(target_width) - net_block_fns.append(target_block_fn) - num_outgoing_connections.append(0) - - # Save output feats. 
- if block_spec.is_output: - if block_spec.level in endpoints: - raise ValueError('Duplicate feats found for output level {}.'.format( - block_spec.level)) - if (block_spec.level < self._min_level or - block_spec.level > self._max_level): - raise ValueError('Output level is out of range [{}, {}]'.format( - self._min_level, self._max_level)) - endpoints[block_spec.level] = x - - return endpoints - - def _build_endpoints(self, net): - """Match filter size for endpoints before sharing conv layers.""" - endpoints = {} - for level in range(self._min_level, self._max_level + 1): - x = layers.Conv2D( - filters=self._endpoints_num_filters, - kernel_size=1, - strides=1, - use_bias=False, - kernel_initializer=self._kernel_initializer, - kernel_regularizer=self._kernel_regularizer, - bias_regularizer=self._bias_regularizer)( - net[level]) - x = self._norm( - axis=self._bn_axis, - momentum=self._norm_momentum, - epsilon=self._norm_epsilon)( - x) - x = tf_utils.get_activation(self._activation)(x) - endpoints[level] = x - return endpoints - - def _resample_with_alpha(self, - inputs, - input_width, - input_block_fn, - target_width, - target_num_filters, - target_block_fn, - alpha=0.5): - """Match resolution and feature dimension.""" - _, _, _, input_num_filters = inputs.get_shape().as_list() - if input_block_fn == 'bottleneck': - input_num_filters /= 4 - new_num_filters = int(input_num_filters * alpha) - - x = layers.Conv2D( - filters=new_num_filters, - kernel_size=1, - strides=1, - use_bias=False, - kernel_initializer=self._kernel_initializer, - kernel_regularizer=self._kernel_regularizer, - bias_regularizer=self._bias_regularizer)( - inputs) - x = self._norm( - axis=self._bn_axis, - momentum=self._norm_momentum, - epsilon=self._norm_epsilon)( - x) - x = tf_utils.get_activation(self._activation)(x) - - # Spatial resampling. - if input_width > target_width: - x = layers.Conv2D( - filters=new_num_filters, - kernel_size=3, - strides=2, - padding='SAME', - use_bias=False, - kernel_initializer=self._kernel_initializer, - kernel_regularizer=self._kernel_regularizer, - bias_regularizer=self._bias_regularizer)( - x) - x = self._norm( - axis=self._bn_axis, - momentum=self._norm_momentum, - epsilon=self._norm_epsilon)( - x) - x = tf_utils.get_activation(self._activation)(x) - input_width /= 2 - while input_width > target_width: - x = layers.MaxPool2D(pool_size=3, strides=2, padding='SAME')(x) - input_width /= 2 - elif input_width < target_width: - scale = target_width // input_width - x = layers.UpSampling2D(size=(scale, scale))(x) - - # Last 1x1 conv to match filter size. 
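    # Bottleneck blocks output four times their nominal filter count, so the
    # target filter count is widened accordingly before the projection below.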
- if target_block_fn == 'bottleneck': - target_num_filters *= 4 - x = layers.Conv2D( - filters=target_num_filters, - kernel_size=1, - strides=1, - use_bias=False, - kernel_initializer=self._kernel_initializer, - kernel_regularizer=self._kernel_regularizer, - bias_regularizer=self._bias_regularizer)( - x) - x = self._norm( - axis=self._bn_axis, - momentum=self._norm_momentum, - epsilon=self._norm_epsilon)( - x) - - return x - - -class SpineNetBuilder(object): - """SpineNet builder.""" - - def __init__(self, - model_id, - min_level=3, - max_level=7, - block_specs=build_block_specs(), - kernel_initializer='VarianceScaling', - kernel_regularizer=None, - bias_regularizer=None, - activation='relu', - use_sync_bn=False, - norm_momentum=0.99, - norm_epsilon=0.001): - if model_id not in SCALING_MAP: - raise ValueError( - 'SpineNet {} is not a valid architecture.'.format(model_id)) - scaling_params = SCALING_MAP[model_id] - self._min_level = min_level - self._max_level = max_level - self._block_specs = block_specs - self._endpoints_num_filters = scaling_params['endpoints_num_filters'] - self._resample_alpha = scaling_params['resample_alpha'] - self._block_repeats = scaling_params['block_repeats'] - self._filter_size_scale = scaling_params['filter_size_scale'] - self._kernel_initializer = kernel_initializer - self._kernel_regularizer = kernel_regularizer - self._bias_regularizer = bias_regularizer - self._activation = activation - self._use_sync_bn = use_sync_bn - self._norm_momentum = norm_momentum - self._norm_epsilon = norm_epsilon - - def __call__(self, inputs, is_training=None): - with backend.get_graph().as_default(): - model = SpineNet( - min_level=self._min_level, - max_level=self._max_level, - block_specs=self._block_specs, - endpoints_num_filters=self._endpoints_num_filters, - resample_alpha=self._resample_alpha, - block_repeats=self._block_repeats, - filter_size_scale=self._filter_size_scale, - kernel_initializer=self._kernel_initializer, - kernel_regularizer=self._kernel_regularizer, - bias_regularizer=self._bias_regularizer, - activation=self._activation, - use_sync_bn=self._use_sync_bn, - norm_momentum=self._norm_momentum, - norm_epsilon=self._norm_epsilon) - return model(inputs) -- GitLab From 4140da21e24e55404c502b1d07be8fb775f8a719 Mon Sep 17 00:00:00 2001 From: Hongkun Yu Date: Wed, 24 Jun 2020 22:27:41 -0700 Subject: [PATCH 61/79] Multichannel attention: Override _build_attention method instead of build() PiperOrigin-RevId: 318208409 --- official/nlp/modeling/layers/multi_channel_attention.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/official/nlp/modeling/layers/multi_channel_attention.py b/official/nlp/modeling/layers/multi_channel_attention.py index 499d977c7..160c8aae5 100644 --- a/official/nlp/modeling/layers/multi_channel_attention.py +++ b/official/nlp/modeling/layers/multi_channel_attention.py @@ -117,8 +117,8 @@ class MultiChannelAttention(attention.MultiHeadAttention): cross-attention target sequences. """ - def build(self, input_shape): - super(MultiChannelAttention, self).build(input_shape) + def _build_attention(self, qkv_rank): + super(MultiChannelAttention, self)._build_attention(qkv_rank) self._masked_softmax = masked_softmax.MaskedSoftmax(mask_expansion_axes=[2]) def call(self, inputs, attention_mask=None): -- GitLab From 7ebcbe20a6b77fc8b0a6b8e442c6e5a04732c68b Mon Sep 17 00:00:00 2001 From: Hongkun Yu Date: Thu, 25 Jun 2020 12:18:15 -0700 Subject: [PATCH 62/79] Clean up: use sparse_categorical_crossentropy directly for MLM loss. 
PiperOrigin-RevId: 318322629 --- official/nlp/tasks/masked_lm.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/official/nlp/tasks/masked_lm.py b/official/nlp/tasks/masked_lm.py index bb5398458..ac1aac568 100644 --- a/official/nlp/tasks/masked_lm.py +++ b/official/nlp/tasks/masked_lm.py @@ -48,12 +48,14 @@ class MaskedLMTask(base_task.Task): metrics, aux_losses=None) -> tf.Tensor: metrics = dict([(metric.name, metric) for metric in metrics]) - lm_output = tf.nn.log_softmax( - tf.cast(model_outputs['lm_output'], tf.float32), axis=-1) - mlm_loss = loss_lib.weighted_sparse_categorical_crossentropy_loss( - labels=labels['masked_lm_ids'], - predictions=lm_output, - weights=labels['masked_lm_weights']) + lm_prediction_losses = tf.keras.losses.sparse_categorical_crossentropy( + labels['masked_lm_ids'], + tf.cast(model_outputs['lm_output'], tf.float32), + from_logits=True) + lm_label_weights = labels['masked_lm_weights'] + lm_numerator_loss = tf.reduce_sum(lm_prediction_losses * lm_label_weights) + lm_denominator_loss = tf.reduce_sum(lm_label_weights) + mlm_loss = tf.math.divide_no_nan(lm_numerator_loss, lm_denominator_loss) metrics['lm_example_loss'].update_state(mlm_loss) if 'next_sentence_labels' in labels: sentence_labels = labels['next_sentence_labels'] @@ -74,6 +76,7 @@ class MaskedLMTask(base_task.Task): def build_inputs(self, params, input_context=None): """Returns tf.data.Dataset for pretraining.""" if params.input_path == 'dummy': + def dummy_data(_): dummy_ids = tf.zeros((1, params.seq_length), dtype=tf.int32) dummy_lm = tf.zeros((1, params.max_predictions_per_seq), dtype=tf.int32) -- GitLab From 02242bc8819ca26e0ce691a0b64a2a61c443065e Mon Sep 17 00:00:00 2001 From: Chen Chen Date: Thu, 25 Jun 2020 18:09:30 -0700 Subject: [PATCH 63/79] Internal change PiperOrigin-RevId: 318387106 --- official/nlp/tasks/tagging.py | 98 +++++++++++++++++++++++------- official/nlp/tasks/tagging_test.py | 27 +++++++- official/pip_package/setup.py | 3 + official/requirements.txt | 7 ++- 4 files changed, 107 insertions(+), 28 deletions(-) diff --git a/official/nlp/tasks/tagging.py b/official/nlp/tasks/tagging.py index f8e10e9f7..e8ed89109 100644 --- a/official/nlp/tasks/tagging.py +++ b/official/nlp/tasks/tagging.py @@ -15,7 +15,12 @@ # ============================================================================== """Tagging (e.g., NER/POS) task.""" import logging +from typing import List, Optional + import dataclasses + +from seqeval import metrics as seqeval_metrics + import tensorflow as tf import tensorflow_hub as hub @@ -36,12 +41,12 @@ class TaggingConfig(cfg.TaskConfig): model: encoders.TransformerEncoderConfig = ( encoders.TransformerEncoderConfig()) - # The number of real labels. Note that a word may be tokenized into - # multiple word_pieces tokens, and we asssume the real label id (non-negative) - # is assigned to the first token of the word, and a negative label id is - # assigned to the remaining tokens. The negative label id will not contribute - # to loss and metrics. - num_classes: int = 0 + # The real class names, the order of which should match real label id. + # Note that a word may be tokenized into multiple word_pieces tokens, and + # we asssume the real label id (non-negative) is assigned to the first token + # of the word, and a negative label id is assigned to the remaining tokens. + # The negative label id will not contribute to loss and metrics. 
+ class_names: Optional[List[str]] = None train_data: cfg.DataConfig = cfg.DataConfig() validation_data: cfg.DataConfig = cfg.DataConfig() @@ -75,8 +80,8 @@ class TaggingTask(base_task.Task): if params.hub_module_url and params.init_checkpoint: raise ValueError('At most one of `hub_module_url` and ' '`init_checkpoint` can be specified.') - if params.num_classes == 0: - raise ValueError('TaggingConfig.num_classes cannot be 0.') + if not params.class_names: + raise ValueError('TaggingConfig.class_names cannot be empty.') if params.hub_module_url: self._hub_module = hub.load(params.hub_module_url) @@ -92,7 +97,7 @@ class TaggingTask(base_task.Task): return models.BertTokenClassifier( network=encoder_network, - num_classes=self.task_config.num_classes, + num_classes=len(self.task_config.class_names), initializer=tf.keras.initializers.TruncatedNormal( stddev=self.task_config.model.initializer_range), dropout_rate=self.task_config.model.dropout_rate, @@ -123,7 +128,7 @@ class TaggingTask(base_task.Task): y = tf.random.uniform( shape=(1, params.seq_length), minval=-1, - maxval=self.task_config.num_classes, + maxval=len(self.task_config.class_names), dtype=tf.dtypes.int32) return (x, y) @@ -136,19 +141,66 @@ class TaggingTask(base_task.Task): dataset = tagging_data_loader.TaggingDataLoader(params).load(input_context) return dataset - def build_metrics(self, training=None): - del training - # TODO(chendouble): evaluate using seqeval's f1/precision/recall. - return [tf.keras.metrics.SparseCategoricalAccuracy(name='accuracy')] - - def process_metrics(self, metrics, labels, model_outputs): - masked_labels, masked_weights = _masked_labels_and_weights(labels) - for metric in metrics: - metric.update_state(masked_labels, model_outputs, masked_weights) - - def process_compiled_metrics(self, compiled_metrics, labels, model_outputs): - masked_labels, masked_weights = _masked_labels_and_weights(labels) - compiled_metrics.update_state(masked_labels, model_outputs, masked_weights) + def validation_step(self, inputs, model: tf.keras.Model, metrics=None): + """Validatation step. + + Args: + inputs: a dictionary of input tensors. + model: the keras.Model. + metrics: a nested structure of metrics objects. + + Returns: + A dictionary of logs. + """ + features, labels = inputs + outputs = self.inference_step(features, model) + loss = self.build_losses(labels=labels, model_outputs=outputs) + + # Negative label ids are padding labels which should be ignored. + real_label_index = tf.where(tf.greater_equal(labels, 0)) + predict_ids = tf.math.argmax(outputs, axis=-1) + predict_ids = tf.gather_nd(predict_ids, real_label_index) + label_ids = tf.gather_nd(labels, real_label_index) + return { + self.loss: loss, + 'predict_ids': predict_ids, + 'label_ids': label_ids, + } + + def aggregate_logs(self, state=None, step_outputs=None): + """Aggregates over logs returned from a validation step.""" + if state is None: + state = {'predict_class': [], 'label_class': []} + + def id_to_class_name(batched_ids): + class_names = [] + for per_example_ids in batched_ids: + class_names.append([]) + for per_token_id in per_example_ids.numpy().tolist(): + class_names[-1].append(self.task_config.class_names[per_token_id]) + + return class_names + + # Convert id to class names, because `seqeval_metrics` relies on the class + # name to decide IOB tags. 
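    # For example, with class_names = ['O', 'B-PER', 'I-PER'] (as in the unit
    # test below), a batch of predict_ids [[1, 2, 0]] is converted to
    # [['B-PER', 'I-PER', 'O']], which seqeval scores as a single PER entity.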
+ state['predict_class'].extend(id_to_class_name(step_outputs['predict_ids'])) + state['label_class'].extend(id_to_class_name(step_outputs['label_ids'])) + return state + + def reduce_aggregated_logs(self, aggregated_logs): + """Reduces aggregated logs over validation steps.""" + label_class = aggregated_logs['label_class'] + predict_class = aggregated_logs['predict_class'] + return { + 'f1': + seqeval_metrics.f1_score(label_class, predict_class), + 'precision': + seqeval_metrics.precision_score(label_class, predict_class), + 'recall': + seqeval_metrics.recall_score(label_class, predict_class), + 'accuracy': + seqeval_metrics.accuracy_score(label_class, predict_class), + } def initialize(self, model): """Load a pretrained checkpoint (if exists) and then train from iter 0.""" diff --git a/official/nlp/tasks/tagging_test.py b/official/nlp/tasks/tagging_test.py index 1eaa398f8..1f9c6268f 100644 --- a/official/nlp/tasks/tagging_test.py +++ b/official/nlp/tasks/tagging_test.py @@ -58,7 +58,7 @@ class TaggingTest(tf.test.TestCase): init_checkpoint=saved_path, model=self._encoder_config, train_data=self._train_data_config, - num_classes=3) + class_names=["O", "B-PER", "I-PER"]) task = tagging.TaggingTask(config) model = task.build_model() metrics = task.build_metrics() @@ -74,7 +74,7 @@ class TaggingTest(tf.test.TestCase): config = tagging.TaggingConfig( model=self._encoder_config, train_data=self._train_data_config, - num_classes=3) + class_names=["O", "B-PER", "I-PER"]) task = tagging.TaggingTask(config) model = task.build_model() @@ -116,10 +116,31 @@ class TaggingTest(tf.test.TestCase): config = tagging.TaggingConfig( hub_module_url=hub_module_url, model=self._encoder_config, - num_classes=4, + class_names=["O", "B-PER", "I-PER"], train_data=self._train_data_config) self._run_task(config) + def test_seqeval_metrics(self): + config = tagging.TaggingConfig( + model=self._encoder_config, + train_data=self._train_data_config, + class_names=["O", "B-PER", "I-PER"]) + task = tagging.TaggingTask(config) + model = task.build_model() + dataset = task.build_inputs(config.train_data) + + iterator = iter(dataset) + strategy = tf.distribute.get_strategy() + distributed_outputs = strategy.run( + functools.partial(task.validation_step, model=model), + args=(next(iterator),)) + outputs = tf.nest.map_structure(strategy.experimental_local_results, + distributed_outputs) + aggregated = task.aggregate_logs(step_outputs=outputs) + aggregated = task.aggregate_logs(state=aggregated, step_outputs=outputs) + self.assertCountEqual({"f1", "precision", "recall", "accuracy"}, + task.reduce_aggregated_logs(aggregated).keys()) + if __name__ == "__main__": tf.test.main() diff --git a/official/pip_package/setup.py b/official/pip_package/setup.py index 903777dde..760314600 100644 --- a/official/pip_package/setup.py +++ b/official/pip_package/setup.py @@ -45,6 +45,9 @@ def _get_requirements(): os.path.join(os.path.dirname(__file__), '../requirements.txt'), 'r') as f: for line in f: package_name = line.strip() + # Skip empty line or comments starting with "#". 
+ if not package_name or package_name[0] == '#': + continue if package_name.startswith('-e '): dependency_links_tmp.append(package_name[3:].strip()) else: diff --git a/official/requirements.txt b/official/requirements.txt index 6df2c2b8e..e8830ede9 100644 --- a/official/requirements.txt +++ b/official/requirements.txt @@ -16,10 +16,13 @@ dataclasses gin-config tf_slim>=1.1.0 typing -sentencepiece Cython matplotlib -opencv-python-headless pyyaml +# CV related dependencies +opencv-python-headless Pillow -e git+https://github.com/cocodataset/cocoapi#egg=pycocotools&subdirectory=PythonAPI +# NLP related dependencies +seqeval +sentencepiece -- GitLab From 1e4fd825bcf4b42933e206bffe2ced288faa9e5e Mon Sep 17 00:00:00 2001 From: Hongkun Yu Date: Thu, 25 Jun 2020 23:53:34 -0700 Subject: [PATCH 64/79] Update weighted sparse categorical loss. Remove per-example loss. PiperOrigin-RevId: 318421049 --- official/nlp/modeling/losses/README.md | 3 - official/nlp/modeling/losses/__init__.py | 1 - ...eighted_sparse_categorical_crossentropy.py | 48 +---- ...ed_sparse_categorical_crossentropy_test.py | 196 ++---------------- official/nlp/tasks/masked_lm.py | 8 +- official/nlp/tasks/sentence_prediction.py | 11 +- 6 files changed, 30 insertions(+), 237 deletions(-) diff --git a/official/nlp/modeling/losses/README.md b/official/nlp/modeling/losses/README.md index 522150cfa..a2607b1da 100644 --- a/official/nlp/modeling/losses/README.md +++ b/official/nlp/modeling/losses/README.md @@ -4,6 +4,3 @@ Losses contains common loss computation used in NLP tasks. * `weighted_sparse_categorical_crossentropy_loss` computes per-batch sparse categorical crossentropy loss. - -* `weighted_sparse_categorical_crossentropy_per_example_loss` computes -per-example sparse categorical crossentropy loss. diff --git a/official/nlp/modeling/losses/__init__.py b/official/nlp/modeling/losses/__init__.py index 919bad308..7a396eb98 100644 --- a/official/nlp/modeling/losses/__init__.py +++ b/official/nlp/modeling/losses/__init__.py @@ -14,4 +14,3 @@ # ============================================================================== """Activations package definition. Subject to change.""" from official.nlp.modeling.losses.weighted_sparse_categorical_crossentropy import loss as weighted_sparse_categorical_crossentropy_loss -from official.nlp.modeling.losses.weighted_sparse_categorical_crossentropy import per_example_loss as weighted_sparse_categorical_crossentropy_per_example_loss diff --git a/official/nlp/modeling/losses/weighted_sparse_categorical_crossentropy.py b/official/nlp/modeling/losses/weighted_sparse_categorical_crossentropy.py index b88d8e366..cd532f858 100644 --- a/official/nlp/modeling/losses/weighted_sparse_categorical_crossentropy.py +++ b/official/nlp/modeling/losses/weighted_sparse_categorical_crossentropy.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== -"""Sparse categorical cross-entropy losses.""" +"""Weighted sparse categorical cross-entropy losses.""" from __future__ import absolute_import from __future__ import division @@ -43,37 +43,7 @@ def _validate_rank(labels, predictions, weights): "predictions.shape was %s.") % (labels.shape, predictions.shape)) -def per_example_loss(labels, predictions, weights=None): - """Calculate a per-example sparse categorical crossentropy loss. - - This loss function assumes that the predictions are post-softmax. 
- Args: - labels: The labels to evaluate against. Should be a set of integer indices - ranging from 0 to (vocab_size-1). - predictions: The network predictions. Should have softmax already applied. - weights: An optional weight array of the same shape as the 'labels' array. - If None, all examples will be used. - - Returns: - A tensor of shape predictions.shape[:-1] containing the per-example - loss. - """ - # When using these functions with the Keras core API, we will need to squeeze - # the labels tensor - Keras adds a spurious inner dimension. - labels, predictions = _adjust_labels(labels, predictions) - _validate_rank(labels, predictions, weights) - - labels_one_hot = tf.one_hot(labels, predictions.shape[-1]) - labels_one_hot = tf.cast(labels_one_hot, predictions.dtype) - per_example_loss_data = -tf.reduce_sum( - predictions * labels_one_hot, axis=[-1]) - if weights is not None: - weights = tf.cast(weights, per_example_loss_data.dtype) - per_example_loss_data = weights * per_example_loss_data - return per_example_loss_data - - -def loss(labels, predictions, weights=None): +def loss(labels, predictions, weights=None, from_logits=False): """Calculate a per-batch sparse categorical crossentropy loss. This loss function assumes that the predictions are post-softmax. @@ -83,6 +53,7 @@ def loss(labels, predictions, weights=None): predictions: The network predictions. Should have softmax already applied. weights: An optional weight array of the same shape as the 'labels' array. If None, all examples will be used. + from_logits: Whether the input predictions are logits. Returns: A loss scalar. @@ -95,12 +66,11 @@ def loss(labels, predictions, weights=None): labels, predictions = _adjust_labels(labels, predictions) _validate_rank(labels, predictions, weights) - per_example_loss_data = per_example_loss(labels, predictions, weights) + example_losses = tf.keras.losses.sparse_categorical_crossentropy( + labels, predictions, from_logits=from_logits) if weights is None: - return tf.reduce_mean(per_example_loss_data) - else: - numerator = tf.reduce_sum(per_example_loss_data) - weights = tf.cast(weights, predictions.dtype) - denominator = tf.reduce_sum(weights) + 1e-5 - return numerator / denominator + return tf.reduce_mean(example_losses) + weights = tf.cast(weights, predictions.dtype) + return tf.math.divide_no_nan( + tf.reduce_sum(example_losses * weights), tf.reduce_sum(weights)) diff --git a/official/nlp/modeling/losses/weighted_sparse_categorical_crossentropy_test.py b/official/nlp/modeling/losses/weighted_sparse_categorical_crossentropy_test.py index 2fec2a318..b6e1a01d7 100644 --- a/official/nlp/modeling/losses/weighted_sparse_categorical_crossentropy_test.py +++ b/official/nlp/modeling/losses/weighted_sparse_categorical_crossentropy_test.py @@ -53,8 +53,7 @@ class ClassificationLossTest(keras_parameterized.TestCase): # Create a maskedLM from the transformer stack. test_layer = layers.MaskedLM( - embedding_table=xformer_stack.get_embedding_table(), - output=output) + embedding_table=xformer_stack.get_embedding_table(), output=output) # Create a model from the masked LM layer. 
lm_input_tensor = tf.keras.Input(shape=(sequence_length, hidden_size)) @@ -63,123 +62,6 @@ class ClassificationLossTest(keras_parameterized.TestCase): output = test_layer(lm_input_tensor, masked_positions=masked_lm_positions) return tf.keras.Model([lm_input_tensor, masked_lm_positions], output) - def create_classification_model(self, input_width, num_classes): - test_object = networks.Classification( - input_width=input_width, num_classes=num_classes) - # Create a 2-dimensional input (the first dimension is implicit). - pooled_data = tf.keras.Input(shape=(input_width,), dtype=tf.float32) - output = test_object(pooled_data) - return tf.keras.Model(pooled_data, output) - - def test_per_example_loss_3d_input(self): - """Test per-example loss with a 3-dimensional input, from a masked LM.""" - vocab_size = 100 - sequence_length = 32 - hidden_size = 64 - num_predictions = 21 - model = self.create_lm_model( - vocab_size=vocab_size, - sequence_length=sequence_length, - hidden_size=hidden_size, - num_predictions=num_predictions) - - # Get the output of the masked LM. - batch_size = 3 - lm_input_data = 10 * np.random.random_sample( - (batch_size, sequence_length, hidden_size)) - masked_position_data = np.random.randint( - 2, size=(batch_size, num_predictions)) - output_data = model.predict([lm_input_data, masked_position_data]) - - # Calculate per-example loss. - labels = np.random.randint(vocab_size, size=(batch_size, num_predictions)) - per_example_loss_data = weighted_sparse_categorical_crossentropy.per_example_loss( - predictions=output_data, labels=labels) - - # Per-example loss data should have one value per prediction, and those - # values shouldn't be zero in this case (as we're using random data). - expected_shape = [batch_size, num_predictions] - self.assertEqual(expected_shape, per_example_loss_data.shape.as_list()) - self.assertNotAllClose( - tf.zeros_like(per_example_loss_data), per_example_loss_data) - - def test_per_example_loss_2d_input(self): - """Test per-example loss with a 2-d input, from a classifier.""" - input_width = 512 - num_classes = 10 - model = self.create_classification_model(input_width, num_classes) - - # Invoke the network as part of a Model. - batch_size = 3 - input_data = 10 * np.random.random_sample((batch_size, input_width)) - output_data = model.predict(input_data) - - # Calculate per example loss. - labels = np.random.randint(num_classes, size=(batch_size)) - per_example_loss_data = weighted_sparse_categorical_crossentropy.per_example_loss( - predictions=output_data, labels=labels) - - # Per-example loss data should have one value per batch item, and those - # values shouldn't be zero in this case (as we're using random data). - self.assertEqual([batch_size], per_example_loss_data.shape.as_list()) - self.assertNotAllClose( - tf.zeros_like(per_example_loss_data), per_example_loss_data) - - def test_per_example_loss_weights_3d_input(self): - """Test weighted per-example loss with a 3-d input, from a masked LM.""" - vocab_size = 100 - sequence_length = 32 - hidden_size = 64 - num_predictions = 21 - model = self.create_lm_model( - vocab_size=vocab_size, - sequence_length=sequence_length, - hidden_size=hidden_size, - num_predictions=num_predictions) - - # Get the output of the masked LM. 
- batch_size = 3 - lm_input_data = 10 * np.random.random_sample( - (batch_size, sequence_length, hidden_size)) - masked_position_data = np.random.randint( - 2, size=(batch_size, num_predictions)) - output_data = model.predict([lm_input_data, masked_position_data]) - - # Calculate per-example loss with weights. - labels = np.random.randint(vocab_size, size=(batch_size, num_predictions)) - weights = np.random.randint(2, size=(batch_size, num_predictions)) - - per_example_loss_data = weighted_sparse_categorical_crossentropy.per_example_loss( - predictions=output_data, labels=labels, weights=weights) - - # Weighted per-example loss data should be equivalent to multiplying the - # loss tensor by the weights tensor. - expected_weighted_loss = per_example_loss_data * weights - self.assertAllClose(expected_weighted_loss, per_example_loss_data) - - def test_per_example_loss_weights_2d_input(self): - """Test weighted per-example loss with a 2-d input, from a classifier.""" - input_width = 512 - num_classes = 10 - model = self.create_classification_model(input_width, num_classes) - - # Invoke the network as part of a Model. - batch_size = 3 - input_data = 10 * np.random.random_sample((batch_size, input_width)) - output_data = model.predict(input_data) - - # Calculate per-example loss with weights. - labels = np.random.randint(num_classes, size=(batch_size)) - weights = np.random.randint(2, size=(batch_size)) - - per_example_loss_data = weighted_sparse_categorical_crossentropy.per_example_loss( - predictions=output_data, labels=labels, weights=weights) - - # Weighted per-example loss data should be equivalent to multiplying the - # loss tensor by the weights tensor. - expected_weighted_loss = per_example_loss_data * weights - self.assertAllClose(expected_weighted_loss, per_example_loss_data) - def test_loss_3d_input(self): """Test overall loss with a 3-dimensional input, from a masked LM.""" vocab_size = 100 @@ -213,26 +95,6 @@ class ClassificationLossTest(keras_parameterized.TestCase): self.assertNotAllClose( tf.zeros_like(per_example_loss_data), per_example_loss_data) - def test_loss_2d_input(self): - """Test overall loss with a 2-d input, from a classifier.""" - input_width = 512 - num_classes = 10 - model = self.create_classification_model(input_width, num_classes) - - # Invoke the network as part of a Model. - batch_size = 3 - input_data = 10 * np.random.random_sample((batch_size, input_width)) - output_data = model.predict(input_data) - - # Calculate per example loss. - labels = np.random.randint(num_classes, size=(batch_size)) - loss_data = weighted_sparse_categorical_crossentropy.loss( - predictions=output_data, labels=labels) - - # Loss data should have one value only, and that value shouldn't be zero in - # this case (as we're using random data). - self.assertNotAllClose(0, loss_data) - def test_loss_weights_3d_input(self): """Test masked loss with a 3-dimensional input, from a masked LM.""" vocab_size = 100 @@ -262,26 +124,6 @@ class ClassificationLossTest(keras_parameterized.TestCase): # Because the tensor is fully masked, the loss should be 0. self.assertAllClose(0, weighted_loss_data) - def test_loss_weights_2d_input(self): - """Test masked loss with a 2-d input, from a classifier.""" - input_width = 512 - num_classes = 10 - model = self.create_classification_model(input_width, num_classes) - - # Invoke the network as part of a Model. 
- batch_size = 3 - input_data = 10 * np.random.random_sample((batch_size, input_width)) - output_data = model.predict(input_data) - - # Calculate a fully masked weight tensor. This should give a loss of zero. - labels = np.random.randint(num_classes, size=(batch_size)) - null_weights = np.zeros((batch_size)) - weighted_loss_data = weighted_sparse_categorical_crossentropy.loss( - predictions=output_data, labels=labels, weights=null_weights) - - # Because the tensor is fully masked, the loss should be 0. - self.assertAllClose(0, weighted_loss_data) - def test_mismatched_predictions_and_labels_ranks_squeezes(self): """Test that the loss asserts when rank(predictions)-1 != rank(labels).""" batch_size = 3 @@ -289,7 +131,7 @@ class ClassificationLossTest(keras_parameterized.TestCase): labels = np.random.randint(10, size=(batch_size, 1)) # All that this test tests is that the squeeze is successful. - _ = weighted_sparse_categorical_crossentropy.per_example_loss( + _ = weighted_sparse_categorical_crossentropy.loss( predictions=output_data, labels=labels) def test_mismatched_weights_and_labels_ranks_fail(self): @@ -299,9 +141,6 @@ class ClassificationLossTest(keras_parameterized.TestCase): labels = np.random.randint(10, size=(batch_size, 10)) weights = np.random.randint(2, size=(batch_size)) - with self.assertRaisesRegex(RuntimeError, ".*of the same rank.*"): - _ = weighted_sparse_categorical_crossentropy.per_example_loss( - predictions=output_data, labels=labels, weights=weights) with self.assertRaisesRegex(RuntimeError, ".*of the same rank.*"): _ = weighted_sparse_categorical_crossentropy.loss( predictions=output_data, labels=labels, weights=weights) @@ -317,8 +156,6 @@ class ClassificationLossTest(keras_parameterized.TestCase): # We're not trying to validate numerical correctness, just ensure that # we can in fact pass tensors to these functions without causing runtime # errors from the shape checking code. - _ = weighted_sparse_categorical_crossentropy.per_example_loss( - predictions=output_data, labels=labels, weights=weights) _ = weighted_sparse_categorical_crossentropy.loss( predictions=output_data, labels=labels, weights=weights) @@ -338,20 +175,15 @@ class ClassificationLossTest(keras_parameterized.TestCase): [-2.7760355, -1.8219438, -3.0924666, -1.0779881, -0.9407509]]]) labels = np.array([[4, 0], [2, 2], [2, 1]]) - # Validate that per_example loss calculations are the same. - per_example_loss_data = weighted_sparse_categorical_crossentropy.per_example_loss( - predictions=output_data, labels=labels) - expected_per_example_loss_data = [[1.2923571, 2.7117882], - [2.287932, 2.287932], - [3.0924666, 1.8219438]] - self.assertAllClose(expected_per_example_loss_data, per_example_loss_data) - # Validate that overall loss calculations are the same. weights = np.array([[1, 0], [0, 0], [0, 0]]) loss_data = weighted_sparse_categorical_crossentropy.loss( - predictions=output_data, labels=labels, weights=weights) + predictions=output_data, + labels=labels, + weights=weights, + from_logits=True) expected_loss_data = 1.2923441 - self.assertAllClose(expected_loss_data, loss_data) + self.assertAllClose(expected_loss_data, loss_data, rtol=1e-3) def test_legacy_classification_loss_compatibility(self): """Test to validate computational correctness during refactors.""" @@ -362,19 +194,15 @@ class ClassificationLossTest(keras_parameterized.TestCase): [-1.6975292e-03, -6.4009643e+00, -1.0226612e+01]]) labels = np.array([2, 1]) - # Validate that per_example loss calculations are the same. 
- per_example_loss_data = weighted_sparse_categorical_crossentropy.per_example_loss( - predictions=output_data, labels=labels) - expected_per_example_loss_data = [6.4434357, 6.4009643] - self.assertAllClose(expected_per_example_loss_data, per_example_loss_data) - # Validate that overall loss calculations are the same. weights = None loss_data = weighted_sparse_categorical_crossentropy.loss( - predictions=output_data, labels=labels, weights=weights) + predictions=output_data, + labels=labels, + weights=weights, + from_logits=True) expected_loss_data = 6.4222 - self.assertAllClose(expected_loss_data, loss_data) - + self.assertAllClose(expected_loss_data, loss_data, rtol=1e-3) if __name__ == "__main__": tf.test.main() diff --git a/official/nlp/tasks/masked_lm.py b/official/nlp/tasks/masked_lm.py index ac1aac568..70e55ab5e 100644 --- a/official/nlp/tasks/masked_lm.py +++ b/official/nlp/tasks/masked_lm.py @@ -21,7 +21,6 @@ from official.core import base_task from official.modeling.hyperparams import config_definitions as cfg from official.nlp.configs import bert from official.nlp.data import pretrain_dataloader -from official.nlp.modeling import losses as loss_lib @dataclasses.dataclass @@ -61,9 +60,10 @@ class MaskedLMTask(base_task.Task): sentence_labels = labels['next_sentence_labels'] sentence_outputs = tf.cast( model_outputs['next_sentence'], dtype=tf.float32) - sentence_loss = loss_lib.weighted_sparse_categorical_crossentropy_loss( - labels=sentence_labels, - predictions=tf.nn.log_softmax(sentence_outputs, axis=-1)) + sentence_loss = tf.keras.losses.sparse_categorical_crossentropy( + sentence_labels, + sentence_outputs, + from_logits=True) metrics['next_sentence_loss'].update_state(sentence_loss) total_loss = mlm_loss + sentence_loss else: diff --git a/official/nlp/tasks/sentence_prediction.py b/official/nlp/tasks/sentence_prediction.py index b7c45dfe6..868083af1 100644 --- a/official/nlp/tasks/sentence_prediction.py +++ b/official/nlp/tasks/sentence_prediction.py @@ -26,7 +26,6 @@ from official.core import base_task from official.modeling.hyperparams import config_definitions as cfg from official.nlp.configs import bert from official.nlp.data import sentence_prediction_dataloader -from official.nlp.modeling import losses as loss_lib from official.nlp.tasks import utils @@ -75,10 +74,10 @@ class SentencePredictionTask(base_task.Task): return bert.instantiate_bertpretrainer_from_cfg(self.task_config.model) def build_losses(self, labels, model_outputs, aux_losses=None) -> tf.Tensor: - loss = loss_lib.weighted_sparse_categorical_crossentropy_loss( - labels=labels, - predictions=tf.nn.log_softmax( - tf.cast(model_outputs['sentence_prediction'], tf.float32), axis=-1)) + loss = tf.keras.losses.sparse_categorical_crossentropy( + labels, + tf.cast(model_outputs['sentence_prediction'], tf.float32), + from_logits=True) if aux_losses: loss += tf.add_n(aux_losses) @@ -94,7 +93,7 @@ class SentencePredictionTask(base_task.Task): input_word_ids=dummy_ids, input_mask=dummy_ids, input_type_ids=dummy_ids) - y = tf.ones((1, 1), dtype=tf.int32) + y = tf.zeros((1, 1), dtype=tf.int32) return (x, y) dataset = tf.data.Dataset.range(1) -- GitLab From 0f0c7745d169abd3d9c37e59a7a4ed2b1db14d75 Mon Sep 17 00:00:00 2001 From: vivek rathod Date: Fri, 26 Jun 2020 09:55:10 -0700 Subject: [PATCH 65/79] Merged commit includes the following changes: (#8739) 318417714 by jonathanhuang: Internal change. 
-- 318367213 by sbeery: Pointing users to more documentation for beam -- 318358685 by sbeery: Context R-CNN sample config for GPU -- 318309800 by rathodv: Internal -- 318303364 by ronnyvotel: Adding the option for parsing and including DensePose annotations. http://densepose.org/ -- 318291319 by aom: Adds conv_bn_act conv_block option, and naming convention changes for BiFPN utils. -- 318200598 by ronnyvotel: Updating the TF Example Decoder to parse DensePose annotations. -- 318174065 by jonathanhuang: Internal change. -- 318167805 by rathodv: Add use_tpu flag to TF2 binary. -- 318145285 by aom: Adds option for convolutional keras box predictor to force use_bias. -- PiperOrigin-RevId: 318417714 --- research/object_detection/core/model.py | 37 +- research/object_detection/core/model_test.py | 3 + .../object_detection/core/standard_fields.py | 8 + .../data_decoders/tf_example_decoder.py | 132 ++- .../data_decoders/tf_example_decoder_test.py | 97 +- .../generate_detection_data_tf1_test.py | 3 + .../generate_embedding_data_tf1_test.py | 3 + .../dataset_tools/create_coco_tf_record.py | 179 +++- .../create_coco_tf_record_test.py | 146 ++- .../export_tflite_ssd_graph_lib_tf1_test.py | 3 + .../object_detection/exporter_lib_tf2_test.py | 3 + .../object_detection/exporter_tf1_test.py | 3 + .../object_detection/g3doc/context_rcnn.md | 10 +- .../legacy/trainer_tf1_test.py | 3 + .../center_net_meta_arch.py | 35 +- .../center_net_meta_arch_tf2_test.py | 8 +- .../faster_rcnn_meta_arch.py | 62 +- .../meta_architectures/ssd_meta_arch.py | 92 +- .../object_detection/model_lib_tf2_test.py | 7 +- research/object_detection/model_lib_v2.py | 74 +- research/object_detection/model_main_tf2.py | 12 +- ...ption_resnet_v2_keras_feature_extractor.py | 970 +----------------- ...net_v2_keras_feature_extractor_tf2_test.py | 2 +- ...ter_rcnn_resnet_keras_feature_extractor.py | 17 - ...sd_mobilenet_v1_keras_feature_extractor.py | 11 - ...obilenet_v2_fpn_keras_feature_extractor.py | 11 - ...sd_mobilenet_v2_keras_feature_extractor.py | 11 - ...d_resnet_v1_fpn_keras_feature_extractor.py | 11 - .../convolutional_keras_box_predictor.py | 10 +- .../protos/input_reader.proto | 6 +- research/object_detection/protos/train.proto | 3 +- ...t_rcnn_resnet101_snapshot_serengeti.config | 164 +++ .../object_detection/utils/bifpn_utils.py | 30 +- .../object_detection/utils/config_util.py | 1 + 34 files changed, 967 insertions(+), 1200 deletions(-) create mode 100644 research/object_detection/samples/configs/context_rcnn_resnet101_snapshot_serengeti.config diff --git a/research/object_detection/core/model.py b/research/object_detection/core/model.py index 0430b37b5..437ed08e1 100644 --- a/research/object_detection/core/model.py +++ b/research/object_detection/core/model.py @@ -391,7 +391,9 @@ class DetectionModel(six.with_metaclass(abc.ABCMeta, _BaseClass)): pass @abc.abstractmethod - def restore_map(self, fine_tune_checkpoint_type='detection'): + def restore_map(self, + fine_tune_checkpoint_type='detection', + load_all_detection_checkpoint_vars=False): """Returns a map of variables to load from a foreign checkpoint. Returns a map of variable names to load from a checkpoint to variables in @@ -407,6 +409,9 @@ class DetectionModel(six.with_metaclass(abc.ABCMeta, _BaseClass)): checkpoint (with compatible variable names) or to restore from a classification checkpoint for initialization prior to training. Valid values: `detection`, `classification`. Default 'detection'. 
+ load_all_detection_checkpoint_vars: whether to load all variables (when + `fine_tune_checkpoint_type` is `detection`). If False, only variables + within the feature extractor scope are included. Default False. Returns: A dict mapping variable names (to load from a checkpoint) to variables in @@ -414,6 +419,36 @@ class DetectionModel(six.with_metaclass(abc.ABCMeta, _BaseClass)): """ pass + @abc.abstractmethod + def restore_from_objects(self, fine_tune_checkpoint_type='detection'): + """Returns a map of variables to load from a foreign checkpoint. + + Returns a dictionary of Tensorflow 2 Trackable objects (e.g. tf.Module + or Checkpoint). This enables the model to initialize based on weights from + another task. For example, the feature extractor variables from a + classification model can be used to bootstrap training of an object + detector. When loading from an object detection model, the checkpoint model + should have the same parameters as this detection model with exception of + the num_classes parameter. + + Note that this function is intended to be used to restore Keras-based + models when running Tensorflow 2, whereas restore_map (above) is intended + to be used to restore Slim-based models when running Tensorflow 1.x. + + TODO(jonathanhuang,rathodv): Check tf_version and raise unimplemented + error for both restore_map and restore_from_objects depending on version. + + Args: + fine_tune_checkpoint_type: whether to restore from a full detection + checkpoint (with compatible variable names) or to restore from a + classification checkpoint for initialization prior to training. + Valid values: `detection`, `classification`. Default 'detection'. + + Returns: + A dict mapping keys to Trackable objects (tf.Module or Checkpoint). + """ + pass + @abc.abstractmethod def updates(self): """Returns a list of update operators for this model. diff --git a/research/object_detection/core/model_test.py b/research/object_detection/core/model_test.py index 2bb1ab343..fcc36c03d 100644 --- a/research/object_detection/core/model_test.py +++ b/research/object_detection/core/model_test.py @@ -57,6 +57,9 @@ class FakeModel(model.DetectionModel): def restore_map(self): return {} + def restore_from_objects(self, fine_tune_checkpoint_type): + pass + def regularization_losses(self): return [] diff --git a/research/object_detection/core/standard_fields.py b/research/object_detection/core/standard_fields.py index df995b4a4..fcfb97ae8 100644 --- a/research/object_detection/core/standard_fields.py +++ b/research/object_detection/core/standard_fields.py @@ -66,6 +66,11 @@ class InputDataFields(object): groundtruth_keypoint_weights: groundtruth weight factor for keypoints. groundtruth_label_weights: groundtruth label weights. groundtruth_weights: groundtruth weight factor for bounding boxes. + groundtruth_dp_num_points: The number of DensePose sampled points for each + instance. + groundtruth_dp_part_ids: Part indices for DensePose points. + groundtruth_dp_surface_coords: Image locations and UV coordinates for + DensePose points. num_groundtruth_boxes: number of groundtruth boxes. is_annotated: whether an image has been labeled or not. 
true_image_shapes: true shapes of images in the resized images, as resized @@ -108,6 +113,9 @@ class InputDataFields(object): groundtruth_keypoint_weights = 'groundtruth_keypoint_weights' groundtruth_label_weights = 'groundtruth_label_weights' groundtruth_weights = 'groundtruth_weights' + groundtruth_dp_num_points = 'groundtruth_dp_num_points' + groundtruth_dp_part_ids = 'groundtruth_dp_part_ids' + groundtruth_dp_surface_coords = 'groundtruth_dp_surface_coords' num_groundtruth_boxes = 'num_groundtruth_boxes' is_annotated = 'is_annotated' true_image_shape = 'true_image_shape' diff --git a/research/object_detection/data_decoders/tf_example_decoder.py b/research/object_detection/data_decoders/tf_example_decoder.py index bd1fa2c77..04cc4db59 100644 --- a/research/object_detection/data_decoders/tf_example_decoder.py +++ b/research/object_detection/data_decoders/tf_example_decoder.py @@ -30,6 +30,7 @@ from object_detection.core import data_decoder from object_detection.core import standard_fields as fields from object_detection.protos import input_reader_pb2 from object_detection.utils import label_map_util +from object_detection.utils import shape_utils # pylint: disable=g-import-not-at-top try: @@ -170,7 +171,8 @@ class TfExampleDecoder(data_decoder.DataDecoder): num_additional_channels=0, load_multiclass_scores=False, load_context_features=False, - expand_hierarchy_labels=False): + expand_hierarchy_labels=False, + load_dense_pose=False): """Constructor sets keys_to_features and items_to_handlers. Args: @@ -201,6 +203,7 @@ class TfExampleDecoder(data_decoder.DataDecoder): account the provided hierarchy in the label_map_proto_file. For positive classes, the labels are extended to ancestor. For negative classes, the labels are expanded to descendants. + load_dense_pose: Whether to load DensePose annotations. Raises: ValueError: If `instance_mask_type` option is not one of @@ -371,6 +374,34 @@ class TfExampleDecoder(data_decoder.DataDecoder): self._decode_png_instance_masks)) else: raise ValueError('Did not recognize the `instance_mask_type` option.') + if load_dense_pose: + self.keys_to_features['image/object/densepose/num'] = ( + tf.VarLenFeature(tf.int64)) + self.keys_to_features['image/object/densepose/part_index'] = ( + tf.VarLenFeature(tf.int64)) + self.keys_to_features['image/object/densepose/x'] = ( + tf.VarLenFeature(tf.float32)) + self.keys_to_features['image/object/densepose/y'] = ( + tf.VarLenFeature(tf.float32)) + self.keys_to_features['image/object/densepose/u'] = ( + tf.VarLenFeature(tf.float32)) + self.keys_to_features['image/object/densepose/v'] = ( + tf.VarLenFeature(tf.float32)) + self.items_to_handlers[ + fields.InputDataFields.groundtruth_dp_num_points] = ( + slim_example_decoder.Tensor('image/object/densepose/num')) + self.items_to_handlers[fields.InputDataFields.groundtruth_dp_part_ids] = ( + slim_example_decoder.ItemHandlerCallback( + ['image/object/densepose/part_index', + 'image/object/densepose/num'], self._dense_pose_part_indices)) + self.items_to_handlers[ + fields.InputDataFields.groundtruth_dp_surface_coords] = ( + slim_example_decoder.ItemHandlerCallback( + ['image/object/densepose/x', 'image/object/densepose/y', + 'image/object/densepose/u', 'image/object/densepose/v', + 'image/object/densepose/num'], + self._dense_pose_surface_coordinates)) + if label_map_proto_file: # If the label_map_proto is provided, try to use it in conjunction with # the class text, and fall back to a materialized ID. 
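Editorial note: the constructor hunk above only registers VarLen features; the DensePose data itself is stored flat across all instances in the image, with `image/object/densepose/num` recording how many of the flat points belong to each box. A minimal sketch of that layout follows (illustration only, not part of the patch: the two-instance split and the values are made up, and the repo's `dataset_util` helpers seen in the tests wrap the same `tf.train.Feature` construction):

```python
# Sketch: flat DensePose encoding expected by TfExampleDecoder(load_dense_pose=True).
# Instance 0 owns the first 2 points, instance 1 owns the remaining 1 point.
import tensorflow as tf

def _int64_list(values):
  return tf.train.Feature(int64_list=tf.train.Int64List(value=values))

def _float_list(values):
  return tf.train.Feature(float_list=tf.train.FloatList(value=values))

example = tf.train.Example(features=tf.train.Features(feature={
    'image/object/densepose/num': _int64_list([2, 1]),
    'image/object/densepose/part_index': _int64_list([0, 3, 14]),   # part ids 0-23
    'image/object/densepose/x': _float_list([0.10, 0.25, 0.80]),    # normalized image coords
    'image/object/densepose/y': _float_list([0.40, 0.55, 0.70]),
    'image/object/densepose/u': _float_list([0.01, 0.02, 0.03]),    # surface coords
    'image/object/densepose/v': _float_list([0.99, 0.98, 0.97]),
}))
print(example.features.feature['image/object/densepose/num'])
```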
@@ -547,6 +578,14 @@ class TfExampleDecoder(data_decoder.DataDecoder): group_of = fields.InputDataFields.groundtruth_group_of tensor_dict[group_of] = tf.cast(tensor_dict[group_of], dtype=tf.bool) + if fields.InputDataFields.groundtruth_dp_num_points in tensor_dict: + tensor_dict[fields.InputDataFields.groundtruth_dp_num_points] = tf.cast( + tensor_dict[fields.InputDataFields.groundtruth_dp_num_points], + dtype=tf.int32) + tensor_dict[fields.InputDataFields.groundtruth_dp_part_ids] = tf.cast( + tensor_dict[fields.InputDataFields.groundtruth_dp_part_ids], + dtype=tf.int32) + return tensor_dict def _reshape_keypoints(self, keys_to_tensors): @@ -697,6 +736,97 @@ class TfExampleDecoder(data_decoder.DataDecoder): lambda: tf.map_fn(decode_png_mask, png_masks, dtype=tf.float32), lambda: tf.zeros(tf.cast(tf.stack([0, height, width]), dtype=tf.int32))) + def _dense_pose_part_indices(self, keys_to_tensors): + """Creates a tensor that contains part indices for each DensePose point. + + Args: + keys_to_tensors: a dictionary from keys to tensors. + + Returns: + A 2-D int32 tensor of shape [num_instances, num_points] where each element + contains the DensePose part index (0-23). The value `num_points` + corresponds to the maximum number of sampled points across all instances + in the image. Note that instances with less sampled points will be padded + with zeros in the last dimension. + """ + num_points_per_instances = keys_to_tensors['image/object/densepose/num'] + part_index = keys_to_tensors['image/object/densepose/part_index'] + if isinstance(num_points_per_instances, tf.SparseTensor): + num_points_per_instances = tf.sparse_tensor_to_dense( + num_points_per_instances) + if isinstance(part_index, tf.SparseTensor): + part_index = tf.sparse_tensor_to_dense(part_index) + part_index = tf.cast(part_index, dtype=tf.int32) + max_points_per_instance = tf.cast( + tf.math.reduce_max(num_points_per_instances), dtype=tf.int32) + num_points_cumulative = tf.concat([ + [0], tf.math.cumsum(num_points_per_instances)], axis=0) + + def pad_parts_tensor(instance_ind): + points_range_start = num_points_cumulative[instance_ind] + points_range_end = num_points_cumulative[instance_ind + 1] + part_inds = part_index[points_range_start:points_range_end] + return shape_utils.pad_or_clip_nd(part_inds, + output_shape=[max_points_per_instance]) + + return tf.map_fn(pad_parts_tensor, + tf.range(tf.size(num_points_per_instances)), + dtype=tf.int32) + + def _dense_pose_surface_coordinates(self, keys_to_tensors): + """Creates a tensor that contains surface coords for each DensePose point. + + Args: + keys_to_tensors: a dictionary from keys to tensors. + + Returns: + A 3-D float32 tensor of shape [num_instances, num_points, 4] where each + point contains (y, x, v, u) data for each sampled DensePose point. The + (y, x) coordinate has normalized image locations for the point, and (v, u) + contains the surface coordinate (also normalized) for the part. The value + `num_points` corresponds to the maximum number of sampled points across + all instances in the image. Note that instances with less sampled points + will be padded with zeros in dim=1. 
+ """ + num_points_per_instances = keys_to_tensors['image/object/densepose/num'] + dp_y = keys_to_tensors['image/object/densepose/y'] + dp_x = keys_to_tensors['image/object/densepose/x'] + dp_v = keys_to_tensors['image/object/densepose/v'] + dp_u = keys_to_tensors['image/object/densepose/u'] + if isinstance(num_points_per_instances, tf.SparseTensor): + num_points_per_instances = tf.sparse_tensor_to_dense( + num_points_per_instances) + if isinstance(dp_y, tf.SparseTensor): + dp_y = tf.sparse_tensor_to_dense(dp_y) + if isinstance(dp_x, tf.SparseTensor): + dp_x = tf.sparse_tensor_to_dense(dp_x) + if isinstance(dp_v, tf.SparseTensor): + dp_v = tf.sparse_tensor_to_dense(dp_v) + if isinstance(dp_u, tf.SparseTensor): + dp_u = tf.sparse_tensor_to_dense(dp_u) + max_points_per_instance = tf.cast( + tf.math.reduce_max(num_points_per_instances), dtype=tf.int32) + num_points_cumulative = tf.concat([ + [0], tf.math.cumsum(num_points_per_instances)], axis=0) + + def pad_surface_coordinates_tensor(instance_ind): + """Pads DensePose surface coordinates for each instance.""" + points_range_start = num_points_cumulative[instance_ind] + points_range_end = num_points_cumulative[instance_ind + 1] + y = dp_y[points_range_start:points_range_end] + x = dp_x[points_range_start:points_range_end] + v = dp_v[points_range_start:points_range_end] + u = dp_u[points_range_start:points_range_end] + # Create [num_points_i, 4] tensor, where num_points_i is the number of + # sampled points for instance i. + unpadded_tensor = tf.stack([y, x, v, u], axis=1) + return shape_utils.pad_or_clip_nd( + unpadded_tensor, output_shape=[max_points_per_instance, 4]) + + return tf.map_fn(pad_surface_coordinates_tensor, + tf.range(tf.size(num_points_per_instances)), + dtype=tf.float32) + def _expand_image_label_hierarchy(self, image_classes, image_confidences): """Expand image level labels according to the hierarchy. 
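Editorial note: the two item handlers above convert those flat point lists into per-instance tensors padded to the largest point count in the image. A plain-NumPy sketch of that conversion follows (illustration only, not part of the patch; the patched code does the same with `tf.map_fn` and `shape_utils.pad_or_clip_nd`), using the same values the decoder test below feeds in:

```python
# Sketch: ragged-to-padded conversion performed by _dense_pose_part_indices.
# Instance i owns the flat points in [cumulative[i], cumulative[i + 1]);
# instances with fewer points are zero-padded to the per-image maximum.
import numpy as np

num_points_per_instance = np.array([0, 4, 2])            # image/object/densepose/num
flat_part_index = np.array([2, 2, 3, 4, 2, 9])           # image/object/densepose/part_index
max_points = num_points_per_instance.max()
cumulative = np.concatenate([[0], np.cumsum(num_points_per_instance)])

padded = np.zeros((len(num_points_per_instance), max_points), dtype=np.int32)
for i in range(len(num_points_per_instance)):
  points = flat_part_index[cumulative[i]:cumulative[i + 1]]
  padded[i, :len(points)] = points

print(padded)
# [[0 0 0 0]
#  [2 2 3 4]
#  [2 9 0 0]]
```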
diff --git a/research/object_detection/data_decoders/tf_example_decoder_test.py b/research/object_detection/data_decoders/tf_example_decoder_test.py index 9cbed32fc..81ed9258e 100644 --- a/research/object_detection/data_decoders/tf_example_decoder_test.py +++ b/research/object_detection/data_decoders/tf_example_decoder_test.py @@ -1096,8 +1096,8 @@ class TfExampleDecoderTest(test_case.TestCase): return example_decoder.decode(tf.convert_to_tensor(example)) tensor_dict = self.execute_cpu(graph_fn, []) - self.assertTrue( - fields.InputDataFields.groundtruth_instance_masks not in tensor_dict) + self.assertNotIn(fields.InputDataFields.groundtruth_instance_masks, + tensor_dict) def testDecodeImageLabels(self): image_tensor = np.random.randint(256, size=(4, 5, 3)).astype(np.uint8) @@ -1116,8 +1116,7 @@ class TfExampleDecoderTest(test_case.TestCase): return example_decoder.decode(tf.convert_to_tensor(example)) tensor_dict = self.execute_cpu(graph_fn_1, []) - self.assertTrue( - fields.InputDataFields.groundtruth_image_classes in tensor_dict) + self.assertIn(fields.InputDataFields.groundtruth_image_classes, tensor_dict) self.assertAllEqual( tensor_dict[fields.InputDataFields.groundtruth_image_classes], np.array([1, 2])) @@ -1152,8 +1151,7 @@ class TfExampleDecoderTest(test_case.TestCase): return example_decoder.decode(tf.convert_to_tensor(example)) tensor_dict = self.execute_cpu(graph_fn_2, []) - self.assertTrue( - fields.InputDataFields.groundtruth_image_classes in tensor_dict) + self.assertIn(fields.InputDataFields.groundtruth_image_classes, tensor_dict) self.assertAllEqual( tensor_dict[fields.InputDataFields.groundtruth_image_classes], np.array([1, 3])) @@ -1345,6 +1343,93 @@ class TfExampleDecoderTest(test_case.TestCase): expected_image_confidence, tensor_dict[fields.InputDataFields.groundtruth_image_confidences]) + def testDecodeDensePose(self): + image_tensor = np.random.randint(256, size=(4, 5, 3)).astype(np.uint8) + encoded_jpeg, _ = self._create_encoded_and_decoded_data( + image_tensor, 'jpeg') + bbox_ymins = [0.0, 4.0, 2.0] + bbox_xmins = [1.0, 5.0, 8.0] + bbox_ymaxs = [2.0, 6.0, 1.0] + bbox_xmaxs = [3.0, 7.0, 3.3] + densepose_num = [0, 4, 2] + densepose_part_index = [2, 2, 3, 4, 2, 9] + densepose_x = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6] + densepose_y = [0.9, 0.8, 0.7, 0.6, 0.5, 0.4] + densepose_u = [0.01, 0.02, 0.03, 0.04, 0.05, 0.06] + densepose_v = [0.99, 0.98, 0.97, 0.96, 0.95, 0.94] + + def graph_fn(): + example = tf.train.Example( + features=tf.train.Features( + feature={ + 'image/encoded': + dataset_util.bytes_feature(encoded_jpeg), + 'image/format': + dataset_util.bytes_feature(six.b('jpeg')), + 'image/object/bbox/ymin': + dataset_util.float_list_feature(bbox_ymins), + 'image/object/bbox/xmin': + dataset_util.float_list_feature(bbox_xmins), + 'image/object/bbox/ymax': + dataset_util.float_list_feature(bbox_ymaxs), + 'image/object/bbox/xmax': + dataset_util.float_list_feature(bbox_xmaxs), + 'image/object/densepose/num': + dataset_util.int64_list_feature(densepose_num), + 'image/object/densepose/part_index': + dataset_util.int64_list_feature(densepose_part_index), + 'image/object/densepose/x': + dataset_util.float_list_feature(densepose_x), + 'image/object/densepose/y': + dataset_util.float_list_feature(densepose_y), + 'image/object/densepose/u': + dataset_util.float_list_feature(densepose_u), + 'image/object/densepose/v': + dataset_util.float_list_feature(densepose_v), + + })).SerializeToString() + + example_decoder = tf_example_decoder.TfExampleDecoder( + load_dense_pose=True) + output = 
example_decoder.decode(tf.convert_to_tensor(example)) + dp_num_points = output[fields.InputDataFields.groundtruth_dp_num_points] + dp_part_ids = output[fields.InputDataFields.groundtruth_dp_part_ids] + dp_surface_coords = output[ + fields.InputDataFields.groundtruth_dp_surface_coords] + return dp_num_points, dp_part_ids, dp_surface_coords + + dp_num_points, dp_part_ids, dp_surface_coords = self.execute_cpu( + graph_fn, []) + + expected_dp_num_points = [0, 4, 2] + expected_dp_part_ids = [ + [0, 0, 0, 0], + [2, 2, 3, 4], + [2, 9, 0, 0] + ] + expected_dp_surface_coords = np.array( + [ + # Instance 0 (no points). + [[0., 0., 0., 0.], + [0., 0., 0., 0.], + [0., 0., 0., 0.], + [0., 0., 0., 0.]], + # Instance 1 (4 points). + [[0.9, 0.1, 0.99, 0.01], + [0.8, 0.2, 0.98, 0.02], + [0.7, 0.3, 0.97, 0.03], + [0.6, 0.4, 0.96, 0.04]], + # Instance 2 (2 points). + [[0.5, 0.5, 0.95, 0.05], + [0.4, 0.6, 0.94, 0.06], + [0., 0., 0., 0.], + [0., 0., 0., 0.]], + ], dtype=np.float32) + + self.assertAllEqual(dp_num_points, expected_dp_num_points) + self.assertAllEqual(dp_part_ids, expected_dp_part_ids) + self.assertAllClose(dp_surface_coords, expected_dp_surface_coords) + if __name__ == '__main__': tf.test.main() diff --git a/research/object_detection/dataset_tools/context_rcnn/generate_detection_data_tf1_test.py b/research/object_detection/dataset_tools/context_rcnn/generate_detection_data_tf1_test.py index 9002e750f..279183110 100644 --- a/research/object_detection/dataset_tools/context_rcnn/generate_detection_data_tf1_test.py +++ b/research/object_detection/dataset_tools/context_rcnn/generate_detection_data_tf1_test.py @@ -67,6 +67,9 @@ class FakeModel(model.DetectionModel): def restore_map(self, checkpoint_path, fine_tune_checkpoint_type): pass + def restore_from_objects(self, fine_tune_checkpoint_type): + pass + def loss(self, prediction_dict, true_image_shapes): pass diff --git a/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data_tf1_test.py b/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data_tf1_test.py index 064a57e13..6409399eb 100644 --- a/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data_tf1_test.py +++ b/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data_tf1_test.py @@ -73,6 +73,9 @@ class FakeModel(model.DetectionModel): def restore_map(self, checkpoint_path, fine_tune_checkpoint_type): pass + def restore_from_objects(self, fine_tune_checkpoint_type): + pass + def loss(self, prediction_dict, true_image_shapes): pass diff --git a/research/object_detection/dataset_tools/create_coco_tf_record.py b/research/object_detection/dataset_tools/create_coco_tf_record.py index 51ed38910..2703c427e 100644 --- a/research/object_detection/dataset_tools/create_coco_tf_record.py +++ b/research/object_detection/dataset_tools/create_coco_tf_record.py @@ -14,6 +14,9 @@ # ============================================================================== r"""Convert raw COCO dataset to TFRecord for object_detection. +This tool supports data generation for object detection (boxes, masks), +keypoint detection, and DensePose. + Please note that this tool creates sharded output files. Example usage: @@ -63,7 +66,18 @@ tf.flags.DEFINE_string('train_keypoint_annotations_file', '', 'Training annotations JSON file.') tf.flags.DEFINE_string('val_keypoint_annotations_file', '', 'Validation annotations JSON file.') +# DensePose is only available for coco 2014. 
+tf.flags.DEFINE_string('train_densepose_annotations_file', '', + 'Training annotations JSON file for DensePose.') +tf.flags.DEFINE_string('val_densepose_annotations_file', '', + 'Validation annotations JSON file for DensePose.') tf.flags.DEFINE_string('output_dir', '/tmp/', 'Output data directory.') +# Whether to only produce images/annotations on person class (for keypoint / +# densepose task). +tf.flags.DEFINE_boolean('remove_non_person_annotations', False, 'Whether to ' + 'remove all annotations for non-person objects.') +tf.flags.DEFINE_boolean('remove_non_person_images', False, 'Whether to ' + 'remove all examples that do not contain a person.') FLAGS = flags.FLAGS @@ -77,13 +91,33 @@ _COCO_KEYPOINT_NAMES = [ b'left_knee', b'right_knee', b'left_ankle', b'right_ankle' ] +_COCO_PART_NAMES = [ + b'torso_back', b'torso_front', b'right_hand', b'left_hand', b'left_foot', + b'right_foot', b'right_upper_leg_back', b'left_upper_leg_back', + b'right_upper_leg_front', b'left_upper_leg_front', b'right_lower_leg_back', + b'left_lower_leg_back', b'right_lower_leg_front', b'left_lower_leg_front', + b'left_upper_arm_back', b'right_upper_arm_back', b'left_upper_arm_front', + b'right_upper_arm_front', b'left_lower_arm_back', b'right_lower_arm_back', + b'left_lower_arm_front', b'right_lower_arm_front', b'right_face', + b'left_face', +] + +_DP_PART_ID_OFFSET = 1 + + +def clip_to_unit(x): + return min(max(x, 0.0), 1.0) + def create_tf_example(image, annotations_list, image_dir, category_index, include_masks=False, - keypoint_annotations_dict=None): + keypoint_annotations_dict=None, + densepose_annotations_dict=None, + remove_non_person_annotations=False, + remove_non_person_images=False): """Converts image and annotations to a tf.Example proto. Args: @@ -108,10 +142,23 @@ def create_tf_example(image, dictionary with keys: [u'keypoints', u'num_keypoints'] represeting the keypoint information for this person object annotation. If None, then no keypoint annotations will be populated. + densepose_annotations_dict: A dictionary that maps from annotation_id to a + dictionary with keys: [u'dp_I', u'dp_x', u'dp_y', 'dp_U', 'dp_V'] + representing part surface coordinates. For more information see + http://densepose.org/. + remove_non_person_annotations: Whether to remove any annotations that are + not the "person" class. + remove_non_person_images: Whether to remove any images that do not contain + at least one "person" annotation. Returns: + key: SHA256 hash of the image. example: The converted tf.Example num_annotations_skipped: Number of (invalid) annotations that were ignored. + num_keypoint_annotation_skipped: Number of keypoint annotations that were + skipped. + num_densepose_annotation_skipped: Number of DensePose annotations that were + skipped. 
Raises: ValueError: if the image pointed to by data['filename'] is not a valid JPEG @@ -146,6 +193,16 @@ def create_tf_example(image, num_annotations_skipped = 0 num_keypoint_annotation_used = 0 num_keypoint_annotation_skipped = 0 + dp_part_index = [] + dp_x = [] + dp_y = [] + dp_u = [] + dp_v = [] + dp_num_points = [] + densepose_keys = ['dp_I', 'dp_U', 'dp_V', 'dp_x', 'dp_y', 'bbox'] + include_densepose = densepose_annotations_dict is not None + num_densepose_annotation_used = 0 + num_densepose_annotation_skipped = 0 for object_annotations in annotations_list: (x, y, width, height) = tuple(object_annotations['bbox']) if width <= 0 or height <= 0: @@ -154,14 +211,18 @@ def create_tf_example(image, if x + width > image_width or y + height > image_height: num_annotations_skipped += 1 continue + category_id = int(object_annotations['category_id']) + category_name = category_index[category_id]['name'].encode('utf8') + if remove_non_person_annotations and category_name != b'person': + num_annotations_skipped += 1 + continue xmin.append(float(x) / image_width) xmax.append(float(x + width) / image_width) ymin.append(float(y) / image_height) ymax.append(float(y + height) / image_height) is_crowd.append(object_annotations['iscrowd']) - category_id = int(object_annotations['category_id']) category_ids.append(category_id) - category_names.append(category_index[category_id]['name'].encode('utf8')) + category_names.append(category_name) area.append(object_annotations['area']) if include_masks: @@ -197,6 +258,40 @@ def create_tf_example(image, keypoints_visibility.extend([0] * len(_COCO_KEYPOINT_NAMES)) keypoints_name.extend(_COCO_KEYPOINT_NAMES) num_keypoints.append(0) + + if include_densepose: + annotation_id = object_annotations['id'] + if (annotation_id in densepose_annotations_dict and + all(key in densepose_annotations_dict[annotation_id] + for key in densepose_keys)): + dp_annotations = densepose_annotations_dict[annotation_id] + num_densepose_annotation_used += 1 + dp_num_points.append(len(dp_annotations['dp_I'])) + dp_part_index.extend([int(i - _DP_PART_ID_OFFSET) + for i in dp_annotations['dp_I']]) + # DensePose surface coordinates are defined on a [256, 256] grid + # relative to each instance box (i.e. absolute coordinates in range + # [0., 256.]). The following converts the coordinates + # so that they are expressed in normalized image coordinates. + dp_x_box_rel = [ + clip_to_unit(val / 256.) for val in dp_annotations['dp_x']] + dp_x_norm = [(float(x) + x_box_rel * width) / image_width + for x_box_rel in dp_x_box_rel] + dp_y_box_rel = [ + clip_to_unit(val / 256.) 
for val in dp_annotations['dp_y']] + dp_y_norm = [(float(y) + y_box_rel * height) / image_height + for y_box_rel in dp_y_box_rel] + dp_x.extend(dp_x_norm) + dp_y.extend(dp_y_norm) + dp_u.extend(dp_annotations['dp_U']) + dp_v.extend(dp_annotations['dp_V']) + else: + dp_num_points.append(0) + + if (remove_non_person_images and + not any(name == b'person' for name in category_names)): + return (key, None, num_annotations_skipped, + num_keypoint_annotation_skipped, num_densepose_annotation_skipped) feature_dict = { 'image/height': dataset_util.int64_feature(image_height), @@ -243,15 +338,34 @@ def create_tf_example(image, dataset_util.bytes_list_feature(keypoints_name)) num_keypoint_annotation_skipped = ( len(keypoint_annotations_dict) - num_keypoint_annotation_used) + if include_densepose: + feature_dict['image/object/densepose/num'] = ( + dataset_util.int64_list_feature(dp_num_points)) + feature_dict['image/object/densepose/part_index'] = ( + dataset_util.int64_list_feature(dp_part_index)) + feature_dict['image/object/densepose/x'] = ( + dataset_util.float_list_feature(dp_x)) + feature_dict['image/object/densepose/y'] = ( + dataset_util.float_list_feature(dp_y)) + feature_dict['image/object/densepose/u'] = ( + dataset_util.float_list_feature(dp_u)) + feature_dict['image/object/densepose/v'] = ( + dataset_util.float_list_feature(dp_v)) + num_densepose_annotation_skipped = ( + len(densepose_annotations_dict) - num_densepose_annotation_used) example = tf.train.Example(features=tf.train.Features(feature=feature_dict)) - return key, example, num_annotations_skipped, num_keypoint_annotation_skipped + return (key, example, num_annotations_skipped, + num_keypoint_annotation_skipped, num_densepose_annotation_skipped) def _create_tf_record_from_coco_annotations(annotations_file, image_dir, output_path, include_masks, num_shards, - keypoint_annotations_file=''): + keypoint_annotations_file='', + densepose_annotations_file='', + remove_non_person_annotations=False, + remove_non_person_images=False): """Loads COCO annotation json files and converts to tf.Record format. Args: @@ -264,6 +378,12 @@ def _create_tf_record_from_coco_annotations(annotations_file, image_dir, keypoint_annotations_file: JSON file containing the person keypoint annotations. If empty, then no person keypoint annotations will be generated. + densepose_annotations_file: JSON file containing the DensePose annotations. + If empty, then no DensePose annotations will be generated. + remove_non_person_annotations: Whether to remove any annotations that are + not the "person" class. + remove_non_person_images: Whether to remove any images that do not contain + at least one "person" annotation. 
""" with contextlib2.ExitStack() as tf_record_close_stack, \ tf.gfile.GFile(annotations_file, 'r') as fid: @@ -288,7 +408,8 @@ def _create_tf_record_from_coco_annotations(annotations_file, image_dir, if image_id not in annotations_index: missing_annotation_count += 1 annotations_index[image_id] = [] - logging.info('%d images are missing annotations.', missing_annotation_count) + logging.info('%d images are missing annotations.', + missing_annotation_count) keypoint_annotations_index = {} if keypoint_annotations_file: @@ -301,8 +422,20 @@ def _create_tf_record_from_coco_annotations(annotations_file, image_dir, keypoint_annotations_index[image_id] = {} keypoint_annotations_index[image_id][annotation['id']] = annotation + densepose_annotations_index = {} + if densepose_annotations_file: + with tf.gfile.GFile(densepose_annotations_file, 'r') as fid: + densepose_groundtruth_data = json.load(fid) + if 'annotations' in densepose_groundtruth_data: + for annotation in densepose_groundtruth_data['annotations']: + image_id = annotation['image_id'] + if image_id not in densepose_annotations_index: + densepose_annotations_index[image_id] = {} + densepose_annotations_index[image_id][annotation['id']] = annotation + total_num_annotations_skipped = 0 total_num_keypoint_annotations_skipped = 0 + total_num_densepose_annotations_skipped = 0 for idx, image in enumerate(images): if idx % 100 == 0: logging.info('On image %d of %d', idx, len(images)) @@ -312,19 +445,31 @@ def _create_tf_record_from_coco_annotations(annotations_file, image_dir, keypoint_annotations_dict = {} if image['id'] in keypoint_annotations_index: keypoint_annotations_dict = keypoint_annotations_index[image['id']] - (_, tf_example, num_annotations_skipped, - num_keypoint_annotations_skipped) = create_tf_example( + densepose_annotations_dict = None + if densepose_annotations_file: + densepose_annotations_dict = {} + if image['id'] in densepose_annotations_index: + densepose_annotations_dict = densepose_annotations_index[image['id']] + (_, tf_example, num_annotations_skipped, num_keypoint_annotations_skipped, + num_densepose_annotations_skipped) = create_tf_example( image, annotations_list, image_dir, category_index, include_masks, - keypoint_annotations_dict) + keypoint_annotations_dict, densepose_annotations_dict, + remove_non_person_annotations, remove_non_person_images) total_num_annotations_skipped += num_annotations_skipped total_num_keypoint_annotations_skipped += num_keypoint_annotations_skipped + total_num_densepose_annotations_skipped += ( + num_densepose_annotations_skipped) shard_idx = idx % num_shards - output_tfrecords[shard_idx].write(tf_example.SerializeToString()) + if tf_example: + output_tfrecords[shard_idx].write(tf_example.SerializeToString()) logging.info('Finished writing, skipped %d annotations.', total_num_annotations_skipped) if keypoint_annotations_file: logging.info('Finished writing, skipped %d keypoint annotations.', total_num_keypoint_annotations_skipped) + if densepose_annotations_file: + logging.info('Finished writing, skipped %d DensePose annotations.', + total_num_densepose_annotations_skipped) def main(_): @@ -347,20 +492,26 @@ def main(_): train_output_path, FLAGS.include_masks, num_shards=100, - keypoint_annotations_file=FLAGS.train_keypoint_annotations_file) + keypoint_annotations_file=FLAGS.train_keypoint_annotations_file, + densepose_annotations_file=FLAGS.train_densepose_annotations_file, + remove_non_person_annotations=FLAGS.remove_non_person_annotations, + 
remove_non_person_images=FLAGS.remove_non_person_images) _create_tf_record_from_coco_annotations( FLAGS.val_annotations_file, FLAGS.val_image_dir, val_output_path, FLAGS.include_masks, - num_shards=100, - keypoint_annotations_file=FLAGS.val_keypoint_annotations_file) + num_shards=50, + keypoint_annotations_file=FLAGS.val_keypoint_annotations_file, + densepose_annotations_file=FLAGS.val_densepose_annotations_file, + remove_non_person_annotations=FLAGS.remove_non_person_annotations, + remove_non_person_images=FLAGS.remove_non_person_images) _create_tf_record_from_coco_annotations( FLAGS.testdev_annotations_file, FLAGS.test_image_dir, testdev_output_path, FLAGS.include_masks, - num_shards=100) + num_shards=50) if __name__ == '__main__': diff --git a/research/object_detection/dataset_tools/create_coco_tf_record_test.py b/research/object_detection/dataset_tools/create_coco_tf_record_test.py index 0bcc8be9c..659142b7b 100644 --- a/research/object_detection/dataset_tools/create_coco_tf_record_test.py +++ b/research/object_detection/dataset_tools/create_coco_tf_record_test.py @@ -89,7 +89,7 @@ class CreateCocoTFRecordTest(tf.test.TestCase): } (_, example, - num_annotations_skipped, _) = create_coco_tf_record.create_tf_example( + num_annotations_skipped, _, _) = create_coco_tf_record.create_tf_example( image, annotations_list, image_dir, category_index) self.assertEqual(num_annotations_skipped, 0) @@ -156,7 +156,7 @@ class CreateCocoTFRecordTest(tf.test.TestCase): } (_, example, - num_annotations_skipped, _) = create_coco_tf_record.create_tf_example( + num_annotations_skipped, _, _) = create_coco_tf_record.create_tf_example( image, annotations_list, image_dir, category_index, include_masks=True) self.assertEqual(num_annotations_skipped, 0) @@ -259,14 +259,14 @@ class CreateCocoTFRecordTest(tf.test.TestCase): } } - (_, example, _, - num_keypoint_annotation_skipped) = create_coco_tf_record.create_tf_example( - image, - annotations_list, - image_dir, - category_index, - include_masks=False, - keypoint_annotations_dict=keypoint_annotations_dict) + _, example, _, num_keypoint_annotation_skipped, _ = ( + create_coco_tf_record.create_tf_example( + image, + annotations_list, + image_dir, + category_index, + include_masks=False, + keypoint_annotations_dict=keypoint_annotations_dict)) self.assertEqual(num_keypoint_annotation_skipped, 0) self._assertProtoEqual( @@ -310,6 +310,132 @@ class CreateCocoTFRecordTest(tf.test.TestCase): example.features.feature[ 'image/object/keypoint/visibility'].int64_list.value, vv) + def test_create_tf_example_with_dense_pose(self): + image_dir = self.get_temp_dir() + image_file_name = 'tmp_image.jpg' + image_data = np.random.randint(low=0, high=256, size=(256, 256, 3)).astype( + np.uint8) + save_path = os.path.join(image_dir, image_file_name) + image = PIL.Image.fromarray(image_data, 'RGB') + image.save(save_path) + + image = { + 'file_name': image_file_name, + 'height': 256, + 'width': 256, + 'id': 11, + } + + min_x, min_y = 64, 64 + max_x, max_y = 128, 128 + keypoints = [] + num_visible_keypoints = 0 + xv = [] + yv = [] + vv = [] + for _ in range(17): + xc = min_x + int(np.random.rand()*(max_x - min_x)) + yc = min_y + int(np.random.rand()*(max_y - min_y)) + vis = np.random.randint(0, 3) + xv.append(xc) + yv.append(yc) + vv.append(vis) + keypoints.extend([xc, yc, vis]) + num_visible_keypoints += (vis > 0) + + annotations_list = [{ + 'area': 0.5, + 'iscrowd': False, + 'image_id': 11, + 'bbox': [64, 64, 128, 128], + 'category_id': 1, + 'id': 1000 + }] + + num_points = 45 + dp_i 
= np.random.randint(1, 25, (num_points,)).astype(np.float32) + dp_u = np.random.randn(num_points) + dp_v = np.random.randn(num_points) + dp_x = np.random.rand(num_points)*256. + dp_y = np.random.rand(num_points)*256. + densepose_annotations_dict = { + 1000: { + 'dp_I': dp_i, + 'dp_U': dp_u, + 'dp_V': dp_v, + 'dp_x': dp_x, + 'dp_y': dp_y, + 'bbox': [64, 64, 128, 128], + } + } + + category_index = { + 1: { + 'name': 'person', + 'id': 1 + } + } + + _, example, _, _, num_densepose_annotation_skipped = ( + create_coco_tf_record.create_tf_example( + image, + annotations_list, + image_dir, + category_index, + include_masks=False, + densepose_annotations_dict=densepose_annotations_dict)) + + self.assertEqual(num_densepose_annotation_skipped, 0) + self._assertProtoEqual( + example.features.feature['image/height'].int64_list.value, [256]) + self._assertProtoEqual( + example.features.feature['image/width'].int64_list.value, [256]) + self._assertProtoEqual( + example.features.feature['image/filename'].bytes_list.value, + [six.b(image_file_name)]) + self._assertProtoEqual( + example.features.feature['image/source_id'].bytes_list.value, + [six.b(str(image['id']))]) + self._assertProtoEqual( + example.features.feature['image/format'].bytes_list.value, + [six.b('jpeg')]) + self._assertProtoEqual( + example.features.feature['image/object/bbox/xmin'].float_list.value, + [0.25]) + self._assertProtoEqual( + example.features.feature['image/object/bbox/ymin'].float_list.value, + [0.25]) + self._assertProtoEqual( + example.features.feature['image/object/bbox/xmax'].float_list.value, + [0.75]) + self._assertProtoEqual( + example.features.feature['image/object/bbox/ymax'].float_list.value, + [0.75]) + self._assertProtoEqual( + example.features.feature['image/object/class/text'].bytes_list.value, + [six.b('person')]) + self._assertProtoEqual( + example.features.feature['image/object/densepose/num'].int64_list.value, + [num_points]) + self.assertAllEqual( + example.features.feature[ + 'image/object/densepose/part_index'].int64_list.value, + dp_i.astype(np.int64) - create_coco_tf_record._DP_PART_ID_OFFSET) + self.assertAllClose( + example.features.feature['image/object/densepose/u'].float_list.value, + dp_u) + self.assertAllClose( + example.features.feature['image/object/densepose/v'].float_list.value, + dp_v) + expected_dp_x = (64 + dp_x * 128. / 256.) / 256. + expected_dp_y = (64 + dp_y * 128. / 256.) / 256. 
+ self.assertAllClose( + example.features.feature['image/object/densepose/x'].float_list.value, + expected_dp_x) + self.assertAllClose( + example.features.feature['image/object/densepose/y'].float_list.value, + expected_dp_y) + def test_create_sharded_tf_record(self): tmp_dir = self.get_temp_dir() image_paths = ['tmp1_image.jpg', 'tmp2_image.jpg'] diff --git a/research/object_detection/export_tflite_ssd_graph_lib_tf1_test.py b/research/object_detection/export_tflite_ssd_graph_lib_tf1_test.py index 0da7b9aa2..721d2988a 100644 --- a/research/object_detection/export_tflite_ssd_graph_lib_tf1_test.py +++ b/research/object_detection/export_tflite_ssd_graph_lib_tf1_test.py @@ -74,6 +74,9 @@ class FakeModel(model.DetectionModel): def restore_map(self, checkpoint_path, from_detection_checkpoint): pass + def restore_from_objects(self, fine_tune_checkpoint_type): + pass + def loss(self, prediction_dict, true_image_shapes): pass diff --git a/research/object_detection/exporter_lib_tf2_test.py b/research/object_detection/exporter_lib_tf2_test.py index d30d80cb0..99cbf263b 100644 --- a/research/object_detection/exporter_lib_tf2_test.py +++ b/research/object_detection/exporter_lib_tf2_test.py @@ -76,6 +76,9 @@ class FakeModel(model.DetectionModel): def restore_map(self, checkpoint_path, fine_tune_checkpoint_type): pass + def restore_from_objects(self, fine_tune_checkpoint_type): + pass + def loss(self, prediction_dict, true_image_shapes): pass diff --git a/research/object_detection/exporter_tf1_test.py b/research/object_detection/exporter_tf1_test.py index 40bdb966f..b33bafd8d 100644 --- a/research/object_detection/exporter_tf1_test.py +++ b/research/object_detection/exporter_tf1_test.py @@ -105,6 +105,9 @@ class FakeModel(model.DetectionModel): def restore_map(self, checkpoint_path, fine_tune_checkpoint_type): pass + def restore_from_objects(self, fine_tune_checkpoint_type): + pass + def loss(self, prediction_dict, true_image_shapes): pass diff --git a/research/object_detection/g3doc/context_rcnn.md b/research/object_detection/g3doc/context_rcnn.md index a51e4f041..8d132b15b 100644 --- a/research/object_detection/g3doc/context_rcnn.md +++ b/research/object_detection/g3doc/context_rcnn.md @@ -30,9 +30,12 @@ pip install apache-beam ``` and can be run locally, or on a cluster for efficient processing of large -amounts of data. See the +amounts of data. Note that generate_detection_data.py and +generate_embedding_data.py both involve running inference, and may be very slow +to run locally. See the [Apache Beam documentation](https://beam.apache.org/documentation/runners/dataflow/) -for more information. +for more information, and Google Cloud Documentation for a tutorial on +[running Beam jobs on DataFlow](https://cloud.google.com/dataflow/docs/quickstarts/quickstart-python). ### Generating TfRecords from a set of images and a COCO-CameraTraps style JSON @@ -191,3 +194,6 @@ python export_inference_graph.py \ --side_input_types float,int ``` + +If you have questions about Context R-CNN, please contact +[Sara Beery](https://beerys.github.io/). 
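Editorial note, returning briefly to `create_coco_tf_record.py`: DensePose surface points arrive on a [0, 256] grid relative to the instance box and are rewritten as normalized image coordinates before serialization. A small worked sketch of that conversion follows (illustration only, not part of the patch; the helper name is made up, while `clip_to_unit` is the function added by the patch), consistent with the `expected_dp_x`/`expected_dp_y` check in the unit test above:

```python
# Sketch: box-relative DensePose x coordinate -> normalized image x coordinate,
# as done in create_tf_example.
def clip_to_unit(x):
  return min(max(x, 0.0), 1.0)

def densepose_x_to_normalized(dp_x, box_x, box_width, image_width):
  x_box_rel = clip_to_unit(dp_x / 256.)              # fraction of the box width
  return (box_x + x_box_rel * box_width) / image_width

# Box [x=64, y=64, w=128, h=128] in a 256x256 image; a point at dp_x=128
# (the middle of the box grid) lands at normalized x = 0.5.
print(densepose_x_to_normalized(128.0, box_x=64.0, box_width=128.0,
                                image_width=256.0))  # 0.5
```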
diff --git a/research/object_detection/legacy/trainer_tf1_test.py b/research/object_detection/legacy/trainer_tf1_test.py index 5b3f01c01..0cde654e6 100644 --- a/research/object_detection/legacy/trainer_tf1_test.py +++ b/research/object_detection/legacy/trainer_tf1_test.py @@ -185,6 +185,9 @@ class FakeDetectionModel(model.DetectionModel): """ return {var.op.name: var for var in tf.global_variables()} + def restore_from_objects(self, fine_tune_checkpoint_type): + pass + def updates(self): """Returns a list of update operators for this model. diff --git a/research/object_detection/meta_architectures/center_net_meta_arch.py b/research/object_detection/meta_architectures/center_net_meta_arch.py index 8ae98bb1f..6d4f9a2ba 100644 --- a/research/object_detection/meta_architectures/center_net_meta_arch.py +++ b/research/object_detection/meta_architectures/center_net_meta_arch.py @@ -2330,8 +2330,39 @@ class CenterNetMetaArch(model.DetectionModel): def regularization_losses(self): return [] - def restore_map(self, fine_tune_checkpoint_type='classification', + def restore_map(self, + fine_tune_checkpoint_type='detection', load_all_detection_checkpoint_vars=False): + raise RuntimeError('CenterNetMetaArch not supported under TF1.x.') + + def restore_from_objects(self, fine_tune_checkpoint_type='detection'): + """Returns a map of Trackable objects to load from a foreign checkpoint. + + Returns a dictionary of Tensorflow 2 Trackable objects (e.g. tf.Module + or Checkpoint). This enables the model to initialize based on weights from + another task. For example, the feature extractor variables from a + classification model can be used to bootstrap training of an object + detector. When loading from an object detection model, the checkpoint model + should have the same parameters as this detection model with exception of + the num_classes parameter. + + Note that this function is intended to be used to restore Keras-based + models when running Tensorflow 2, whereas restore_map (not implemented + in CenterNet) is intended to be used to restore Slim-based models when + running Tensorflow 1.x. + + TODO(jonathanhuang): Make this function consistent with other + meta-architectures. + + Args: + fine_tune_checkpoint_type: whether to restore from a full detection + checkpoint (with compatible variable names) or to restore from a + classification checkpoint for initialization prior to training. + Valid values: `detection`, `classification`. Default 'detection'. + + Returns: + A dict mapping keys to Trackable objects (tf.Module or Checkpoint). 
+ """ if fine_tune_checkpoint_type == 'classification': return {'feature_extractor': self._feature_extractor.get_base_model()} @@ -2340,7 +2371,7 @@ class CenterNetMetaArch(model.DetectionModel): return {'feature_extractor': self._feature_extractor.get_model()} else: - raise ValueError('Unknown fine tune checkpoint type - {}'.format( + raise ValueError('Not supported fine tune checkpoint type - {}'.format( fine_tune_checkpoint_type)) def updates(self): diff --git a/research/object_detection/meta_architectures/center_net_meta_arch_tf2_test.py b/research/object_detection/meta_architectures/center_net_meta_arch_tf2_test.py index 247ffd1bd..298081b7b 100644 --- a/research/object_detection/meta_architectures/center_net_meta_arch_tf2_test.py +++ b/research/object_detection/meta_architectures/center_net_meta_arch_tf2_test.py @@ -1574,8 +1574,9 @@ class CenterNetMetaArchRestoreTest(test_case.TestCase): """Test restore map for a resnet backbone.""" model = build_center_net_meta_arch(build_resnet=True) - restore_map = model.restore_map('classification') - self.assertIsInstance(restore_map['feature_extractor'], tf.keras.Model) + restore_from_objects_map = model.restore_from_objects('classification') + self.assertIsInstance(restore_from_objects_map['feature_extractor'], + tf.keras.Model) class DummyFeatureExtractor(cnma.CenterNetFeatureExtractor): @@ -1601,9 +1602,6 @@ class DummyFeatureExtractor(cnma.CenterNetFeatureExtractor): def postprocess(self): pass - def restore_map(self): - pass - def call(self, inputs): batch_size, input_height, input_width, _ = inputs.shape fake_output = tf.ones([ diff --git a/research/object_detection/meta_architectures/faster_rcnn_meta_arch.py b/research/object_detection/meta_architectures/faster_rcnn_meta_arch.py index 2b6c093a3..8672f9817 100644 --- a/research/object_detection/meta_architectures/faster_rcnn_meta_arch.py +++ b/research/object_detection/meta_architectures/faster_rcnn_meta_arch.py @@ -261,31 +261,6 @@ class FasterRCNNKerasFeatureExtractor(object): """Get model that extracts second stage box classifier features.""" pass - def restore_from_classification_checkpoint_fn( - self, - first_stage_feature_extractor_scope, - second_stage_feature_extractor_scope): - """Returns a map of variables to load from a foreign checkpoint. - - Args: - first_stage_feature_extractor_scope: A scope name for the first stage - feature extractor. - second_stage_feature_extractor_scope: A scope name for the second stage - feature extractor. - - Returns: - A dict mapping variable names (to load from a checkpoint) to variables in - the model graph. - """ - variables_to_restore = {} - for variable in variables_helper.get_global_variables_safely(): - for scope_name in [first_stage_feature_extractor_scope, - second_stage_feature_extractor_scope]: - if variable.op.name.startswith(scope_name): - var_name = variable.op.name.replace(scope_name + '/', '') - variables_to_restore[var_name] = variable - return variables_to_restore - class FasterRCNNMetaArch(model.DetectionModel): """Faster R-CNN Meta-architecture definition.""" @@ -2801,6 +2776,43 @@ class FasterRCNNMetaArch(model.DetectionModel): variables_to_restore, include_patterns=include_patterns) return {var.op.name: var for var in feature_extractor_variables} + def restore_from_objects(self, fine_tune_checkpoint_type='detection'): + """Returns a map of Trackable objects to load from a foreign checkpoint. + + Returns a dictionary of Tensorflow 2 Trackable objects (e.g. tf.Module + or Checkpoint). 
This enables the model to initialize based on weights from + another task. For example, the feature extractor variables from a + classification model can be used to bootstrap training of an object + detector. When loading from an object detection model, the checkpoint model + should have the same parameters as this detection model with exception of + the num_classes parameter. + + Note that this function is intended to be used to restore Keras-based + models when running Tensorflow 2, whereas restore_map (above) is intended + to be used to restore Slim-based models when running Tensorflow 1.x. + + Args: + fine_tune_checkpoint_type: whether to restore from a full detection + checkpoint (with compatible variable names) or to restore from a + classification checkpoint for initialization prior to training. + Valid values: `detection`, `classification`. Default 'detection'. + + Returns: + A dict mapping keys to Trackable objects (tf.Module or Checkpoint). + """ + if fine_tune_checkpoint_type == 'classification': + return {'feature_extractor': self.classification_backbone} + elif fine_tune_checkpoint_type == 'detection': + fake_model = tf.train.Checkpoint( + _feature_extractor_for_box_classifier_features= + self._feature_extractor_for_box_classifier_features, + _feature_extractor_for_proposal_features= + self._feature_extractor_for_proposal_features) + return {'model': fake_model} + else: + raise ValueError('Not supported fine_tune_checkpoint_type: {}'.format( + fine_tune_checkpoint_type)) + def updates(self): """Returns a list of update operators for this model. diff --git a/research/object_detection/meta_architectures/ssd_meta_arch.py b/research/object_detection/meta_architectures/ssd_meta_arch.py index d401b0de7..6105aa7f1 100644 --- a/research/object_detection/meta_architectures/ssd_meta_arch.py +++ b/research/object_detection/meta_architectures/ssd_meta_arch.py @@ -250,35 +250,6 @@ class SSDKerasFeatureExtractor(tf.keras.Model): def call(self, inputs, **kwargs): return self._extract_features(inputs) - def restore_from_classification_checkpoint_fn(self, feature_extractor_scope): - """Returns a map of variables to load from a foreign checkpoint. - - Args: - feature_extractor_scope: A scope name for the feature extractor. - - Returns: - A dict mapping variable names (to load from a checkpoint) to variables in - the model graph. - """ - variables_to_restore = {} - if tf.executing_eagerly(): - for variable in self.variables: - # variable.name includes ":0" at the end, but the names in the - # checkpoint do not have the suffix ":0". So, we strip it here. - var_name = variable.name[:-2] - if var_name.startswith(feature_extractor_scope + '/'): - var_name = var_name.replace(feature_extractor_scope + '/', '') - variables_to_restore[var_name] = variable - else: - # b/137854499: use global_variables. - for variable in variables_helper.get_global_variables_safely(): - var_name = variable.op.name - if var_name.startswith(feature_extractor_scope + '/'): - var_name = var_name.replace(feature_extractor_scope + '/', '') - variables_to_restore[var_name] = variable - - return variables_to_restore - class SSDMetaArch(model.DetectionModel): """SSD Meta-architecture definition.""" @@ -1295,8 +1266,8 @@ class SSDMetaArch(model.DetectionModel): classification checkpoint for initialization prior to training. Valid values: `detection`, `classification`. Default 'detection'. load_all_detection_checkpoint_vars: whether to load all variables (when - `fine_tune_checkpoint_type='detection'`). 
If False, only variables - within the appropriate scopes are included. Default False. + `fine_tune_checkpoint_type` is `detection`). If False, only variables + within the feature extractor scope are included. Default False. Returns: A dict mapping variable names (to load from a checkpoint) to variables in @@ -1311,36 +1282,53 @@ class SSDMetaArch(model.DetectionModel): elif fine_tune_checkpoint_type == 'detection': variables_to_restore = {} - if tf.executing_eagerly(): + for variable in variables_helper.get_global_variables_safely(): + var_name = variable.op.name if load_all_detection_checkpoint_vars: - # Grab all detection vars by name - for variable in self.variables: - # variable.name includes ":0" at the end, but the names in the - # checkpoint do not have the suffix ":0". So, we strip it here. - var_name = variable.name[:-2] - variables_to_restore[var_name] = variable + variables_to_restore[var_name] = variable else: - # Grab just the feature extractor vars by name - for variable in self._feature_extractor.variables: - # variable.name includes ":0" at the end, but the names in the - # checkpoint do not have the suffix ":0". So, we strip it here. - var_name = variable.name[:-2] - variables_to_restore[var_name] = variable - else: - for variable in variables_helper.get_global_variables_safely(): - var_name = variable.op.name - if load_all_detection_checkpoint_vars: + if var_name.startswith(self._extract_features_scope): variables_to_restore[var_name] = variable - else: - if var_name.startswith(self._extract_features_scope): - variables_to_restore[var_name] = variable - return variables_to_restore else: raise ValueError('Not supported fine_tune_checkpoint_type: {}'.format( fine_tune_checkpoint_type)) + def restore_from_objects(self, fine_tune_checkpoint_type='detection'): + """Returns a map of Trackable objects to load from a foreign checkpoint. + + Returns a dictionary of Tensorflow 2 Trackable objects (e.g. tf.Module + or Checkpoint). This enables the model to initialize based on weights from + another task. For example, the feature extractor variables from a + classification model can be used to bootstrap training of an object + detector. When loading from an object detection model, the checkpoint model + should have the same parameters as this detection model with exception of + the num_classes parameter. + + Note that this function is intended to be used to restore Keras-based + models when running Tensorflow 2, whereas restore_map (above) is intended + to be used to restore Slim-based models when running Tensorflow 1.x. + + Args: + fine_tune_checkpoint_type: whether to restore from a full detection + checkpoint (with compatible variable names) or to restore from a + classification checkpoint for initialization prior to training. + Valid values: `detection`, `classification`. Default 'detection'. + + Returns: + A dict mapping keys to Trackable objects (tf.Module or Checkpoint). + """ + if fine_tune_checkpoint_type == 'classification': + return {'feature_extractor': self.classification_backbone} + elif fine_tune_checkpoint_type == 'detection': + fake_model = tf.train.Checkpoint( + _feature_extractor=self._feature_extractor) + return {'model': fake_model} + else: + raise ValueError('Not supported fine_tune_checkpoint_type: {}'.format( + fine_tune_checkpoint_type)) + def updates(self): """Returns a list of update operators for this model. 
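Editorial note: the `restore_from_objects()` implementations added above (CenterNet, Faster R-CNN, SSD) all return a dict of Trackable objects. A minimal sketch of how such a dict is consumed when fine tuning under TF2 follows (illustration only, not part of the patch; the function name is made up, but the two restore lines mirror `load_fine_tune_checkpoint` in the `model_lib_v2.py` diff further down, and the model's variables must already be built, e.g. via a dummy train step, before restoring):

```python
# Sketch: consuming restore_from_objects() to initialize from an
# object-based (TF2) checkpoint.
import tensorflow as tf

def restore_fine_tune_weights(detection_model, checkpoint_path,
                              fine_tune_checkpoint_type='detection'):
  restore_map = detection_model.restore_from_objects(
      fine_tune_checkpoint_type=fine_tune_checkpoint_type)
  # Each value must be a tf.Module or tf.train.Checkpoint; wrapping the dict
  # in a top-level Checkpoint reproduces the object graph the checkpoint
  # was written with.
  ckpt = tf.train.Checkpoint(**restore_map)
  ckpt.restore(checkpoint_path).assert_existing_objects_matched()
```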
diff --git a/research/object_detection/model_lib_tf2_test.py b/research/object_detection/model_lib_tf2_test.py index 4e9b5a82e..f65273660 100644 --- a/research/object_detection/model_lib_tf2_test.py +++ b/research/object_detection/model_lib_tf2_test.py @@ -123,6 +123,9 @@ class SimpleModel(model.DetectionModel): return [] def restore_map(self, *args, **kwargs): + pass + + def restore_from_objects(self, fine_tune_checkpoint_type): return {'model': self} def preprocess(self, _): @@ -174,7 +177,7 @@ class ModelCheckpointTest(tf.test.TestCase): class IncompatibleModel(SimpleModel): - def restore_map(self, *args, **kwargs): + def restore_from_objects(self, *args, **kwargs): return {'weight': self.weight} @@ -207,7 +210,6 @@ class CheckpointV2Test(tf.test.TestCase): model_lib_v2.load_fine_tune_checkpoint( self._model, self._ckpt_path, checkpoint_type='', checkpoint_version=train_pb2.CheckpointVersion.V2, - load_all_detection_checkpoint_vars=True, input_dataset=self._train_input_fn(), unpad_groundtruth_tensors=True) np.testing.assert_allclose(self._model.weight.numpy(), 42) @@ -220,7 +222,6 @@ class CheckpointV2Test(tf.test.TestCase): model_lib_v2.load_fine_tune_checkpoint( IncompatibleModel(), self._ckpt_path, checkpoint_type='', checkpoint_version=train_pb2.CheckpointVersion.V2, - load_all_detection_checkpoint_vars=True, input_dataset=self._train_input_fn(), unpad_groundtruth_tensors=True) diff --git a/research/object_detection/model_lib_v2.py b/research/object_detection/model_lib_v2.py index 6a764d17a..d004d5354 100644 --- a/research/object_detection/model_lib_v2.py +++ b/research/object_detection/model_lib_v2.py @@ -34,7 +34,6 @@ from object_detection.protos import train_pb2 from object_detection.utils import config_util from object_detection.utils import label_map_util from object_detection.utils import ops -from object_detection.utils import variables_helper from object_detection.utils import visualization_utils as vutils # pylint: disable=g-import-not-at-top @@ -47,13 +46,6 @@ except ImportError: MODEL_BUILD_UTIL_MAP = model_lib.MODEL_BUILD_UTIL_MAP -### NOTE: This file is a wip. -### TODO(kaftan): Explore adding unit tests for individual methods -### TODO(kaftan): Add unit test that checks training on a single image w/ -#### groundtruth, and verfiy that loss goes to zero. -#### Possibly have version that takes it as the whole train & eval dataset, -#### & verify the loss output from the eval_loop method. 
-### TODO(kaftan): Make sure the unit tests run in TAP presubmits or Kokoro
 
 RESTORE_MAP_ERROR_TEMPLATE = (
     'Since we are restoring a v2 style checkpoint'
@@ -277,14 +269,21 @@ def validate_tf_v2_checkpoint_restore_map(checkpoint_restore_map):
   """
 
   for key, value in checkpoint_restore_map.items():
-    if not (isinstance(key, str) and isinstance(value, tf.Module)):
+    if not (isinstance(key, str) and
+            (isinstance(value, tf.Module)
+             or isinstance(value, tf.train.Checkpoint))):
       raise TypeError(RESTORE_MAP_ERROR_TEMPLATE.format(
           key.__class__.__name__, value.__class__.__name__))
 
+
+def is_object_based_checkpoint(checkpoint_path):
+  """Returns true if `checkpoint_path` points to an object-based checkpoint."""
+  var_names = [var[0] for var in tf.train.list_variables(checkpoint_path)]
+  return '_CHECKPOINTABLE_OBJECT_GRAPH' in var_names
+
+
 def load_fine_tune_checkpoint(
-    model, checkpoint_path, checkpoint_type, checkpoint_version,
-    load_all_detection_checkpoint_vars, input_dataset,
+    model, checkpoint_path, checkpoint_type, checkpoint_version, input_dataset,
     unpad_groundtruth_tensors):
   """Load a fine tuning classification or detection checkpoint.
 
@@ -292,8 +291,7 @@ def load_fine_tune_checkpoint(
   the model by computing a dummy loss. (Models might not have built their
   variables before their first execution)
 
-  It then loads a variable-name based classification or detection checkpoint
-  that comes from converted TF 1.x slim model checkpoints.
+  It then loads an object-based classification or detection checkpoint.
 
   This method updates the model in-place and does not return a value.
 
@@ -306,14 +304,22 @@ def load_fine_tune_checkpoint(
       classification checkpoint for initialization prior to training.
       Valid values: `detection`, `classification`.
     checkpoint_version: train_pb2.CheckpointVersion.V1 or V2 enum indicating
-      whether to load checkpoints in V1 style or V2 style.
-    load_all_detection_checkpoint_vars: whether to load all variables (when
-      `fine_tune_checkpoint_type` is `detection`). If False, only variables
-      within the feature extractor scopes are included. Default False.
+      whether to load checkpoints in V1 style or V2 style. In this binary
+      we only support V2 style (object-based) checkpoints.
     input_dataset: The tf.data Dataset the model is being trained on. Needed
       to get the shapes for the dummy loss computation.
     unpad_groundtruth_tensors: A parameter passed to unstack_batch.
+ + Raises: + IOError: if `checkpoint_path` does not point at a valid object-based + checkpoint + ValueError: if `checkpoint_version` is not train_pb2.CheckpointVersion.V2 """ + if not is_object_based_checkpoint(checkpoint_path): + raise IOError('Checkpoint is expected to be an object-based checkpoint.') + if checkpoint_version == train_pb2.CheckpointVersion.V1: + raise ValueError('Checkpoint version should be V2') + features, labels = iter(input_dataset).next() @tf.function @@ -336,26 +342,11 @@ def load_fine_tune_checkpoint( labels, )) - if checkpoint_version == train_pb2.CheckpointVersion.V1: - var_map = model.restore_map( - fine_tune_checkpoint_type=checkpoint_type, - load_all_detection_checkpoint_vars=( - load_all_detection_checkpoint_vars)) - available_var_map = variables_helper.get_variables_available_in_checkpoint( - var_map, - checkpoint_path, - include_global_step=False) - tf.train.init_from_checkpoint(checkpoint_path, - available_var_map) - elif checkpoint_version == train_pb2.CheckpointVersion.V2: - restore_map = model.restore_map( - fine_tune_checkpoint_type=checkpoint_type, - load_all_detection_checkpoint_vars=( - load_all_detection_checkpoint_vars)) - validate_tf_v2_checkpoint_restore_map(restore_map) - - ckpt = tf.train.Checkpoint(**restore_map) - ckpt.restore(checkpoint_path).assert_existing_objects_matched() + restore_from_objects_dict = model.restore_from_objects( + fine_tune_checkpoint_type=checkpoint_type) + validate_tf_v2_checkpoint_restore_map(restore_from_objects_dict) + ckpt = tf.train.Checkpoint(**restore_from_objects_dict) + ckpt.restore(checkpoint_path).assert_existing_objects_matched() def get_filepath(strategy, filepath): @@ -464,8 +455,10 @@ def train_loop( if kwargs['use_bfloat16']: tf.compat.v2.keras.mixed_precision.experimental.set_policy('mixed_bfloat16') - load_all_detection_checkpoint_vars = ( - train_config.load_all_detection_checkpoint_vars) + if train_config.load_all_detection_checkpoint_vars: + raise ValueError('train_pb2.load_all_detection_checkpoint_vars ' + 'unsupported in TF2') + config_util.update_fine_tune_checkpoint_type(train_config) fine_tune_checkpoint_type = train_config.fine_tune_checkpoint_type fine_tune_checkpoint_version = train_config.fine_tune_checkpoint_version @@ -533,7 +526,6 @@ def train_loop( train_config.fine_tune_checkpoint, fine_tune_checkpoint_type, fine_tune_checkpoint_version, - load_all_detection_checkpoint_vars, train_input, unpad_groundtruth_tensors) @@ -807,8 +799,10 @@ def eager_eval_loop( eval_metrics[loss_key] = loss_metrics[loss_key].result() eval_metrics = {str(k): v for k, v in eval_metrics.items()} + tf.logging.info('Eval metrics at step %d', global_step) for k in eval_metrics: tf.compat.v2.summary.scalar(k, eval_metrics[k], step=global_step) + tf.logging.info('\t+ %s: %f', k, eval_metrics[k]) return eval_metrics diff --git a/research/object_detection/model_main_tf2.py b/research/object_detection/model_main_tf2.py index e20a8bc20..715dc798c 100644 --- a/research/object_detection/model_main_tf2.py +++ b/research/object_detection/model_main_tf2.py @@ -16,14 +16,6 @@ r"""Creates and runs TF2 object detection models. -################################## -NOTE: This module has not been fully tested; please bear with us while we iron -out the kinks. -################################## - -When a TPU device is available, this binary uses TPUStrategy. Otherwise, it uses -GPUS with MirroredStrategy/MultiWorkerMirroredStrategy. 
- For local training/evaluation run: PIPELINE_CONFIG_PATH=path/to/pipeline.config MODEL_DIR=/tmp/model_outputs @@ -60,6 +52,8 @@ flags.DEFINE_string( flags.DEFINE_integer('eval_timeout', 3600, 'Number of seconds to wait for an' 'evaluation checkpoint before exiting.') + +flags.DEFINE_bool('use_tpu', False, 'Whether the job is executing on a TPU.') flags.DEFINE_integer( 'num_workers', 1, 'When num_workers > 1, training uses ' 'MultiWorkerMirroredStrategy. When num_workers = 1 it uses ' @@ -84,7 +78,7 @@ def main(unused_argv): checkpoint_dir=FLAGS.checkpoint_dir, wait_interval=300, timeout=FLAGS.eval_timeout) else: - if tf.config.get_visible_devices('TPU'): + if FLAGS.use_tpu: resolver = tf.distribute.cluster_resolver.TPUClusterResolver() tf.config.experimental_connect_to_cluster(resolver) tf.tpu.experimental.initialize_tpu_system(resolver) diff --git a/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor.py b/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor.py index 9196871bd..f185aa01d 100644 --- a/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor.py +++ b/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor.py @@ -59,6 +59,7 @@ class FasterRCNNInceptionResnetV2KerasFeatureExtractor( is_training, first_stage_features_stride, batch_norm_trainable, weight_decay) self._variable_dict = {} + self.classification_backbone = None def preprocess(self, resized_inputs): """Faster R-CNN with Inception Resnet v2 preprocessing. @@ -95,19 +96,20 @@ class FasterRCNNInceptionResnetV2KerasFeatureExtractor( And returns rpn_feature_map: A tensor with shape [batch, height, width, depth] """ - with tf.name_scope(name): - with tf.name_scope('InceptionResnetV2'): - model = inception_resnet_v2.inception_resnet_v2( + if not self.classification_backbone: + self.classification_backbone = inception_resnet_v2.inception_resnet_v2( self._train_batch_norm, output_stride=self._first_stage_features_stride, align_feature_maps=True, weight_decay=self._weight_decay, weights=None, include_top=False) - proposal_features = model.get_layer( + with tf.name_scope(name): + with tf.name_scope('InceptionResnetV2'): + proposal_features = self.classification_backbone.get_layer( name='block17_20_ac').output keras_model = tf.keras.Model( - inputs=model.inputs, + inputs=self.classification_backbone.inputs, outputs=proposal_features) for variable in keras_model.variables: self._variable_dict[variable.name[:-2]] = variable @@ -132,962 +134,26 @@ class FasterRCNNInceptionResnetV2KerasFeatureExtractor( [batch_size * self.max_num_proposals, height, width, depth] representing box classifier features for each proposal. 
""" + if not self.classification_backbone: + self.classification_backbone = inception_resnet_v2.inception_resnet_v2( + self._train_batch_norm, + output_stride=self._first_stage_features_stride, + align_feature_maps=True, + weight_decay=self._weight_decay, + weights=None, + include_top=False) with tf.name_scope(name): with tf.name_scope('InceptionResnetV2'): - model = inception_resnet_v2.inception_resnet_v2( - self._train_batch_norm, - output_stride=16, - align_feature_maps=False, - weight_decay=self._weight_decay, - weights=None, - include_top=False) - - proposal_feature_maps = model.get_layer( + proposal_feature_maps = self.classification_backbone.get_layer( name='block17_20_ac').output - proposal_classifier_features = model.get_layer( + proposal_classifier_features = self.classification_backbone.get_layer( name='conv_7b_ac').output keras_model = model_util.extract_submodel( - model=model, + model=self.classification_backbone, inputs=proposal_feature_maps, outputs=proposal_classifier_features) for variable in keras_model.variables: self._variable_dict[variable.name[:-2]] = variable return keras_model - def restore_from_classification_checkpoint_fn( - self, - first_stage_feature_extractor_scope, - second_stage_feature_extractor_scope): - """Returns a map of variables to load from a foreign checkpoint. - - This uses a hard-coded conversion to load into Keras from a slim-trained - inception_resnet_v2 checkpoint. - Note that this overrides the default implementation in - faster_rcnn_meta_arch.FasterRCNNKerasFeatureExtractor which does not work - for InceptionResnetV2 checkpoints. - - Args: - first_stage_feature_extractor_scope: A scope name for the first stage - feature extractor. - second_stage_feature_extractor_scope: A scope name for the second stage - feature extractor. - - Returns: - A dict mapping variable names (to load from a checkpoint) to variables in - the model graph. 
- """ - - keras_to_slim_name_mapping = { - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d/kernel': 'InceptionResnetV2/Conv2d_1a_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm/beta': 'InceptionResnetV2/Conv2d_1a_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm/moving_mean': 'InceptionResnetV2/Conv2d_1a_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm/moving_variance': 'InceptionResnetV2/Conv2d_1a_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_1/kernel': 'InceptionResnetV2/Conv2d_2a_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_1/beta': 'InceptionResnetV2/Conv2d_2a_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_1/moving_mean': 'InceptionResnetV2/Conv2d_2a_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_1/moving_variance': 'InceptionResnetV2/Conv2d_2a_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_2/kernel': 'InceptionResnetV2/Conv2d_2b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_2/beta': 'InceptionResnetV2/Conv2d_2b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_2/moving_mean': 'InceptionResnetV2/Conv2d_2b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_2/moving_variance': 'InceptionResnetV2/Conv2d_2b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_3/kernel': 'InceptionResnetV2/Conv2d_3b_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_3/beta': 'InceptionResnetV2/Conv2d_3b_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_3/moving_mean': 'InceptionResnetV2/Conv2d_3b_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_3/moving_variance': 'InceptionResnetV2/Conv2d_3b_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_4/kernel': 'InceptionResnetV2/Conv2d_4a_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_4/beta': 'InceptionResnetV2/Conv2d_4a_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_4/moving_mean': 'InceptionResnetV2/Conv2d_4a_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_4/moving_variance': 'InceptionResnetV2/Conv2d_4a_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_5/kernel': 'InceptionResnetV2/Mixed_5b/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_5/beta': 'InceptionResnetV2/Mixed_5b/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_5/moving_mean': 'InceptionResnetV2/Mixed_5b/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_5/moving_variance': 'InceptionResnetV2/Mixed_5b/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_6/kernel': 'InceptionResnetV2/Mixed_5b/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_6/beta': 'InceptionResnetV2/Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 
'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_6/moving_mean': 'InceptionResnetV2/Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_6/moving_variance': 'InceptionResnetV2/Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_7/kernel': 'InceptionResnetV2/Mixed_5b/Branch_1/Conv2d_0b_5x5/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_7/beta': 'InceptionResnetV2/Mixed_5b/Branch_1/Conv2d_0b_5x5/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_7/moving_mean': 'InceptionResnetV2/Mixed_5b/Branch_1/Conv2d_0b_5x5/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_7/moving_variance': 'InceptionResnetV2/Mixed_5b/Branch_1/Conv2d_0b_5x5/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_8/kernel': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_8/beta': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_8/moving_mean': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_8/moving_variance': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_9/kernel': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_9/beta': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_9/moving_mean': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_9/moving_variance': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_10/kernel': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0c_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_10/beta': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0c_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_10/moving_mean': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_10/moving_variance': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_11/kernel': 'InceptionResnetV2/Mixed_5b/Branch_3/Conv2d_0b_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_11/beta': 'InceptionResnetV2/Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_11/moving_mean': 'InceptionResnetV2/Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_11/moving_variance': 'InceptionResnetV2/Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_12/kernel': 'InceptionResnetV2/Repeat/block35_1/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_12/beta': 
'InceptionResnetV2/Repeat/block35_1/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_12/moving_mean': 'InceptionResnetV2/Repeat/block35_1/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_12/moving_variance': 'InceptionResnetV2/Repeat/block35_1/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_13/kernel': 'InceptionResnetV2/Repeat/block35_1/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_13/beta': 'InceptionResnetV2/Repeat/block35_1/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_13/moving_mean': 'InceptionResnetV2/Repeat/block35_1/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_13/moving_variance': 'InceptionResnetV2/Repeat/block35_1/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_14/kernel': 'InceptionResnetV2/Repeat/block35_1/Branch_1/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_14/beta': 'InceptionResnetV2/Repeat/block35_1/Branch_1/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_14/moving_mean': 'InceptionResnetV2/Repeat/block35_1/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_14/moving_variance': 'InceptionResnetV2/Repeat/block35_1/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_15/kernel': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_15/beta': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_15/moving_mean': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_15/moving_variance': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_16/kernel': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_16/beta': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_16/moving_mean': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_16/moving_variance': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_17/kernel': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0c_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_17/beta': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0c_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_17/moving_mean': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_17/moving_variance': 
'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_1_conv/kernel': 'InceptionResnetV2/Repeat/block35_1/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_1_conv/bias': 'InceptionResnetV2/Repeat/block35_1/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_18/kernel': 'InceptionResnetV2/Repeat/block35_2/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_18/beta': 'InceptionResnetV2/Repeat/block35_2/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_18/moving_mean': 'InceptionResnetV2/Repeat/block35_2/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_18/moving_variance': 'InceptionResnetV2/Repeat/block35_2/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_19/kernel': 'InceptionResnetV2/Repeat/block35_2/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_19/beta': 'InceptionResnetV2/Repeat/block35_2/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_19/moving_mean': 'InceptionResnetV2/Repeat/block35_2/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_19/moving_variance': 'InceptionResnetV2/Repeat/block35_2/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_20/kernel': 'InceptionResnetV2/Repeat/block35_2/Branch_1/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_20/beta': 'InceptionResnetV2/Repeat/block35_2/Branch_1/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_20/moving_mean': 'InceptionResnetV2/Repeat/block35_2/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_20/moving_variance': 'InceptionResnetV2/Repeat/block35_2/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_21/kernel': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_21/beta': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_21/moving_mean': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_21/moving_variance': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_22/kernel': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_22/beta': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_22/moving_mean': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_22/moving_variance': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance', - 
'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_23/kernel': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0c_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_23/beta': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0c_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_23/moving_mean': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_23/moving_variance': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_2_conv/kernel': 'InceptionResnetV2/Repeat/block35_2/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_2_conv/bias': 'InceptionResnetV2/Repeat/block35_2/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_24/kernel': 'InceptionResnetV2/Repeat/block35_3/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_24/beta': 'InceptionResnetV2/Repeat/block35_3/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_24/moving_mean': 'InceptionResnetV2/Repeat/block35_3/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_24/moving_variance': 'InceptionResnetV2/Repeat/block35_3/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_25/kernel': 'InceptionResnetV2/Repeat/block35_3/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_25/beta': 'InceptionResnetV2/Repeat/block35_3/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_25/moving_mean': 'InceptionResnetV2/Repeat/block35_3/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_25/moving_variance': 'InceptionResnetV2/Repeat/block35_3/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_26/kernel': 'InceptionResnetV2/Repeat/block35_3/Branch_1/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_26/beta': 'InceptionResnetV2/Repeat/block35_3/Branch_1/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_26/moving_mean': 'InceptionResnetV2/Repeat/block35_3/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_26/moving_variance': 'InceptionResnetV2/Repeat/block35_3/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_27/kernel': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_27/beta': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_27/moving_mean': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_27/moving_variance': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_28/kernel': 
'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_28/beta': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_28/moving_mean': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_28/moving_variance': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_29/kernel': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0c_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_29/beta': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0c_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_29/moving_mean': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_29/moving_variance': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_3_conv/kernel': 'InceptionResnetV2/Repeat/block35_3/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_3_conv/bias': 'InceptionResnetV2/Repeat/block35_3/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_30/kernel': 'InceptionResnetV2/Repeat/block35_4/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_30/beta': 'InceptionResnetV2/Repeat/block35_4/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_30/moving_mean': 'InceptionResnetV2/Repeat/block35_4/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_30/moving_variance': 'InceptionResnetV2/Repeat/block35_4/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_31/kernel': 'InceptionResnetV2/Repeat/block35_4/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_31/beta': 'InceptionResnetV2/Repeat/block35_4/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_31/moving_mean': 'InceptionResnetV2/Repeat/block35_4/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_31/moving_variance': 'InceptionResnetV2/Repeat/block35_4/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_32/kernel': 'InceptionResnetV2/Repeat/block35_4/Branch_1/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_32/beta': 'InceptionResnetV2/Repeat/block35_4/Branch_1/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_32/moving_mean': 'InceptionResnetV2/Repeat/block35_4/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_32/moving_variance': 'InceptionResnetV2/Repeat/block35_4/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_33/kernel': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0a_1x1/weights', - 
'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_33/beta': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_33/moving_mean': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_33/moving_variance': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_34/kernel': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_34/beta': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_34/moving_mean': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_34/moving_variance': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_35/kernel': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0c_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_35/beta': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0c_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_35/moving_mean': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_35/moving_variance': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_4_conv/kernel': 'InceptionResnetV2/Repeat/block35_4/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_4_conv/bias': 'InceptionResnetV2/Repeat/block35_4/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_36/kernel': 'InceptionResnetV2/Repeat/block35_5/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_36/beta': 'InceptionResnetV2/Repeat/block35_5/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_36/moving_mean': 'InceptionResnetV2/Repeat/block35_5/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_36/moving_variance': 'InceptionResnetV2/Repeat/block35_5/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_37/kernel': 'InceptionResnetV2/Repeat/block35_5/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_37/beta': 'InceptionResnetV2/Repeat/block35_5/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_37/moving_mean': 'InceptionResnetV2/Repeat/block35_5/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_37/moving_variance': 'InceptionResnetV2/Repeat/block35_5/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_38/kernel': 'InceptionResnetV2/Repeat/block35_5/Branch_1/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_38/beta': 
'InceptionResnetV2/Repeat/block35_5/Branch_1/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_38/moving_mean': 'InceptionResnetV2/Repeat/block35_5/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_38/moving_variance': 'InceptionResnetV2/Repeat/block35_5/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_39/kernel': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_39/beta': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_39/moving_mean': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_39/moving_variance': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_40/kernel': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_40/beta': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_40/moving_mean': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_40/moving_variance': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_41/kernel': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0c_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_41/beta': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0c_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_41/moving_mean': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_41/moving_variance': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_5_conv/kernel': 'InceptionResnetV2/Repeat/block35_5/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_5_conv/bias': 'InceptionResnetV2/Repeat/block35_5/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_42/kernel': 'InceptionResnetV2/Repeat/block35_6/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_42/beta': 'InceptionResnetV2/Repeat/block35_6/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_42/moving_mean': 'InceptionResnetV2/Repeat/block35_6/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_42/moving_variance': 'InceptionResnetV2/Repeat/block35_6/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_43/kernel': 'InceptionResnetV2/Repeat/block35_6/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_43/beta': 'InceptionResnetV2/Repeat/block35_6/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 
'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_43/moving_mean': 'InceptionResnetV2/Repeat/block35_6/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_43/moving_variance': 'InceptionResnetV2/Repeat/block35_6/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_44/kernel': 'InceptionResnetV2/Repeat/block35_6/Branch_1/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_44/beta': 'InceptionResnetV2/Repeat/block35_6/Branch_1/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_44/moving_mean': 'InceptionResnetV2/Repeat/block35_6/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_44/moving_variance': 'InceptionResnetV2/Repeat/block35_6/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_45/kernel': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_45/beta': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_45/moving_mean': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_45/moving_variance': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_46/kernel': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_46/beta': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_46/moving_mean': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_46/moving_variance': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_47/kernel': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0c_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_47/beta': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0c_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_47/moving_mean': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_47/moving_variance': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_6_conv/kernel': 'InceptionResnetV2/Repeat/block35_6/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_6_conv/bias': 'InceptionResnetV2/Repeat/block35_6/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_48/kernel': 'InceptionResnetV2/Repeat/block35_7/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_48/beta': 'InceptionResnetV2/Repeat/block35_7/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_48/moving_mean': 
'InceptionResnetV2/Repeat/block35_7/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_48/moving_variance': 'InceptionResnetV2/Repeat/block35_7/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_49/kernel': 'InceptionResnetV2/Repeat/block35_7/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_49/beta': 'InceptionResnetV2/Repeat/block35_7/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_49/moving_mean': 'InceptionResnetV2/Repeat/block35_7/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_49/moving_variance': 'InceptionResnetV2/Repeat/block35_7/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_50/kernel': 'InceptionResnetV2/Repeat/block35_7/Branch_1/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_50/beta': 'InceptionResnetV2/Repeat/block35_7/Branch_1/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_50/moving_mean': 'InceptionResnetV2/Repeat/block35_7/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_50/moving_variance': 'InceptionResnetV2/Repeat/block35_7/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_51/kernel': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_51/beta': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_51/moving_mean': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_51/moving_variance': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_52/kernel': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_52/beta': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_52/moving_mean': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_52/moving_variance': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_53/kernel': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0c_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_53/beta': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0c_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_53/moving_mean': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_53/moving_variance': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_7_conv/kernel': 
'InceptionResnetV2/Repeat/block35_7/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_7_conv/bias': 'InceptionResnetV2/Repeat/block35_7/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_54/kernel': 'InceptionResnetV2/Repeat/block35_8/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_54/beta': 'InceptionResnetV2/Repeat/block35_8/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_54/moving_mean': 'InceptionResnetV2/Repeat/block35_8/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_54/moving_variance': 'InceptionResnetV2/Repeat/block35_8/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_55/kernel': 'InceptionResnetV2/Repeat/block35_8/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_55/beta': 'InceptionResnetV2/Repeat/block35_8/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_55/moving_mean': 'InceptionResnetV2/Repeat/block35_8/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_55/moving_variance': 'InceptionResnetV2/Repeat/block35_8/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_56/kernel': 'InceptionResnetV2/Repeat/block35_8/Branch_1/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_56/beta': 'InceptionResnetV2/Repeat/block35_8/Branch_1/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_56/moving_mean': 'InceptionResnetV2/Repeat/block35_8/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_56/moving_variance': 'InceptionResnetV2/Repeat/block35_8/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_57/kernel': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_57/beta': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_57/moving_mean': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_57/moving_variance': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_58/kernel': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_58/beta': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_58/moving_mean': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_58/moving_variance': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_59/kernel': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0c_3x3/weights', - 
'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_59/beta': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0c_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_59/moving_mean': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_59/moving_variance': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_8_conv/kernel': 'InceptionResnetV2/Repeat/block35_8/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_8_conv/bias': 'InceptionResnetV2/Repeat/block35_8/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_60/kernel': 'InceptionResnetV2/Repeat/block35_9/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_60/beta': 'InceptionResnetV2/Repeat/block35_9/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_60/moving_mean': 'InceptionResnetV2/Repeat/block35_9/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_60/moving_variance': 'InceptionResnetV2/Repeat/block35_9/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_61/kernel': 'InceptionResnetV2/Repeat/block35_9/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_61/beta': 'InceptionResnetV2/Repeat/block35_9/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_61/moving_mean': 'InceptionResnetV2/Repeat/block35_9/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_61/moving_variance': 'InceptionResnetV2/Repeat/block35_9/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_62/kernel': 'InceptionResnetV2/Repeat/block35_9/Branch_1/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_62/beta': 'InceptionResnetV2/Repeat/block35_9/Branch_1/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_62/moving_mean': 'InceptionResnetV2/Repeat/block35_9/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_62/moving_variance': 'InceptionResnetV2/Repeat/block35_9/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_63/kernel': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_63/beta': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_63/moving_mean': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_63/moving_variance': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_64/kernel': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_64/beta': 
'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_64/moving_mean': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_64/moving_variance': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_65/kernel': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0c_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_65/beta': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0c_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_65/moving_mean': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_65/moving_variance': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_9_conv/kernel': 'InceptionResnetV2/Repeat/block35_9/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_9_conv/bias': 'InceptionResnetV2/Repeat/block35_9/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_66/kernel': 'InceptionResnetV2/Repeat/block35_10/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_66/beta': 'InceptionResnetV2/Repeat/block35_10/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_66/moving_mean': 'InceptionResnetV2/Repeat/block35_10/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_66/moving_variance': 'InceptionResnetV2/Repeat/block35_10/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_67/kernel': 'InceptionResnetV2/Repeat/block35_10/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_67/beta': 'InceptionResnetV2/Repeat/block35_10/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_67/moving_mean': 'InceptionResnetV2/Repeat/block35_10/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_67/moving_variance': 'InceptionResnetV2/Repeat/block35_10/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_68/kernel': 'InceptionResnetV2/Repeat/block35_10/Branch_1/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_68/beta': 'InceptionResnetV2/Repeat/block35_10/Branch_1/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_68/moving_mean': 'InceptionResnetV2/Repeat/block35_10/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_68/moving_variance': 'InceptionResnetV2/Repeat/block35_10/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_69/kernel': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_69/beta': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0a_1x1/BatchNorm/beta', - 
'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_69/moving_mean': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_69/moving_variance': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_70/kernel': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_70/beta': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_70/moving_mean': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_70/moving_variance': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_71/kernel': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0c_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_71/beta': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0c_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_71/moving_mean': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_71/moving_variance': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_10_conv/kernel': 'InceptionResnetV2/Repeat/block35_10/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block35_10_conv/bias': 'InceptionResnetV2/Repeat/block35_10/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_72/kernel': 'InceptionResnetV2/Mixed_6a/Branch_0/Conv2d_1a_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_72/beta': 'InceptionResnetV2/Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_72/moving_mean': 'InceptionResnetV2/Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_72/moving_variance': 'InceptionResnetV2/Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_73/kernel': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_73/beta': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_73/moving_mean': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_73/moving_variance': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_74/kernel': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_0b_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_74/beta': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_74/moving_mean': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean', - 
'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_74/moving_variance': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_75/kernel': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_1a_3x3/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_75/beta': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_75/moving_mean': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_75/moving_variance': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_76/kernel': 'InceptionResnetV2/Repeat_1/block17_1/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_76/beta': 'InceptionResnetV2/Repeat_1/block17_1/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_76/moving_mean': 'InceptionResnetV2/Repeat_1/block17_1/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_76/moving_variance': 'InceptionResnetV2/Repeat_1/block17_1/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_77/kernel': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_77/beta': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_77/moving_mean': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_77/moving_variance': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_78/kernel': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0b_1x7/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_78/beta': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_78/moving_mean': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_78/moving_variance': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_79/kernel': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0c_7x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_79/beta': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_79/moving_mean': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_79/moving_variance': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_1_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_1/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_1_conv/bias': 
'InceptionResnetV2/Repeat_1/block17_1/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_80/kernel': 'InceptionResnetV2/Repeat_1/block17_2/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_80/beta': 'InceptionResnetV2/Repeat_1/block17_2/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_80/moving_mean': 'InceptionResnetV2/Repeat_1/block17_2/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_80/moving_variance': 'InceptionResnetV2/Repeat_1/block17_2/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_81/kernel': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_81/beta': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_81/moving_mean': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_81/moving_variance': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_82/kernel': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0b_1x7/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_82/beta': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_82/moving_mean': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_82/moving_variance': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_83/kernel': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0c_7x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_83/beta': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_83/moving_mean': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_83/moving_variance': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_2_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_2/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_2_conv/bias': 'InceptionResnetV2/Repeat_1/block17_2/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_84/kernel': 'InceptionResnetV2/Repeat_1/block17_3/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_84/beta': 'InceptionResnetV2/Repeat_1/block17_3/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_84/moving_mean': 'InceptionResnetV2/Repeat_1/block17_3/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_84/moving_variance': 'InceptionResnetV2/Repeat_1/block17_3/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 
'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_85/kernel': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_85/beta': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_85/moving_mean': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_85/moving_variance': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_86/kernel': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0b_1x7/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_86/beta': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_86/moving_mean': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_86/moving_variance': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_87/kernel': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0c_7x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_87/beta': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_87/moving_mean': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_87/moving_variance': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_3_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_3/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_3_conv/bias': 'InceptionResnetV2/Repeat_1/block17_3/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_88/kernel': 'InceptionResnetV2/Repeat_1/block17_4/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_88/beta': 'InceptionResnetV2/Repeat_1/block17_4/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_88/moving_mean': 'InceptionResnetV2/Repeat_1/block17_4/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_88/moving_variance': 'InceptionResnetV2/Repeat_1/block17_4/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_89/kernel': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_89/beta': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_89/moving_mean': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_89/moving_variance': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_90/kernel': 
'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0b_1x7/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_90/beta': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_90/moving_mean': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_90/moving_variance': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_91/kernel': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0c_7x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_91/beta': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_91/moving_mean': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_91/moving_variance': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_4_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_4/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_4_conv/bias': 'InceptionResnetV2/Repeat_1/block17_4/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_92/kernel': 'InceptionResnetV2/Repeat_1/block17_5/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_92/beta': 'InceptionResnetV2/Repeat_1/block17_5/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_92/moving_mean': 'InceptionResnetV2/Repeat_1/block17_5/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_92/moving_variance': 'InceptionResnetV2/Repeat_1/block17_5/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_93/kernel': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_93/beta': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_93/moving_mean': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_93/moving_variance': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_94/kernel': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0b_1x7/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_94/beta': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_94/moving_mean': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_94/moving_variance': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_95/kernel': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0c_7x1/weights', - 
'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_95/beta': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_95/moving_mean': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_95/moving_variance': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_5_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_5/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_5_conv/bias': 'InceptionResnetV2/Repeat_1/block17_5/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_96/kernel': 'InceptionResnetV2/Repeat_1/block17_6/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_96/beta': 'InceptionResnetV2/Repeat_1/block17_6/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_96/moving_mean': 'InceptionResnetV2/Repeat_1/block17_6/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_96/moving_variance': 'InceptionResnetV2/Repeat_1/block17_6/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_97/kernel': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_97/beta': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_97/moving_mean': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_97/moving_variance': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_98/kernel': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0b_1x7/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_98/beta': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_98/moving_mean': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_98/moving_variance': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_99/kernel': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0c_7x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_99/beta': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_99/moving_mean': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_99/moving_variance': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_6_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_6/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_6_conv/bias': 
'InceptionResnetV2/Repeat_1/block17_6/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_100/kernel': 'InceptionResnetV2/Repeat_1/block17_7/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_100/beta': 'InceptionResnetV2/Repeat_1/block17_7/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_100/moving_mean': 'InceptionResnetV2/Repeat_1/block17_7/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_100/moving_variance': 'InceptionResnetV2/Repeat_1/block17_7/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_101/kernel': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_101/beta': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_101/moving_mean': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_101/moving_variance': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_102/kernel': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0b_1x7/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_102/beta': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_102/moving_mean': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_102/moving_variance': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_103/kernel': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0c_7x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_103/beta': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_103/moving_mean': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_103/moving_variance': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_7_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_7/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_7_conv/bias': 'InceptionResnetV2/Repeat_1/block17_7/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_104/kernel': 'InceptionResnetV2/Repeat_1/block17_8/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_104/beta': 'InceptionResnetV2/Repeat_1/block17_8/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_104/moving_mean': 'InceptionResnetV2/Repeat_1/block17_8/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_104/moving_variance': 'InceptionResnetV2/Repeat_1/block17_8/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 
'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_105/kernel': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_105/beta': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_105/moving_mean': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_105/moving_variance': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_106/kernel': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0b_1x7/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_106/beta': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_106/moving_mean': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_106/moving_variance': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_107/kernel': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0c_7x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_107/beta': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_107/moving_mean': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_107/moving_variance': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_8_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_8/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_8_conv/bias': 'InceptionResnetV2/Repeat_1/block17_8/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_108/kernel': 'InceptionResnetV2/Repeat_1/block17_9/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_108/beta': 'InceptionResnetV2/Repeat_1/block17_9/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_108/moving_mean': 'InceptionResnetV2/Repeat_1/block17_9/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_108/moving_variance': 'InceptionResnetV2/Repeat_1/block17_9/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_109/kernel': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_109/beta': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_109/moving_mean': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_109/moving_variance': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 
'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_110/kernel': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0b_1x7/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_110/beta': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_110/moving_mean': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_110/moving_variance': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_111/kernel': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0c_7x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_111/beta': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_111/moving_mean': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_111/moving_variance': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_9_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_9/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_9_conv/bias': 'InceptionResnetV2/Repeat_1/block17_9/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_112/kernel': 'InceptionResnetV2/Repeat_1/block17_10/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_112/beta': 'InceptionResnetV2/Repeat_1/block17_10/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_112/moving_mean': 'InceptionResnetV2/Repeat_1/block17_10/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_112/moving_variance': 'InceptionResnetV2/Repeat_1/block17_10/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_113/kernel': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_113/beta': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_113/moving_mean': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_113/moving_variance': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_114/kernel': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0b_1x7/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_114/beta': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_114/moving_mean': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_114/moving_variance': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 
'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_115/kernel': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0c_7x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_115/beta': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_115/moving_mean': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_115/moving_variance': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_10_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_10/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_10_conv/bias': 'InceptionResnetV2/Repeat_1/block17_10/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_116/kernel': 'InceptionResnetV2/Repeat_1/block17_11/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_116/beta': 'InceptionResnetV2/Repeat_1/block17_11/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_116/moving_mean': 'InceptionResnetV2/Repeat_1/block17_11/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_116/moving_variance': 'InceptionResnetV2/Repeat_1/block17_11/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_117/kernel': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_117/beta': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_117/moving_mean': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_117/moving_variance': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_118/kernel': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0b_1x7/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_118/beta': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_118/moving_mean': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_118/moving_variance': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_119/kernel': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0c_7x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_119/beta': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_119/moving_mean': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_119/moving_variance': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 
'FirstStageFeatureExtractor/InceptionResnetV2/block17_11_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_11/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_11_conv/bias': 'InceptionResnetV2/Repeat_1/block17_11/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_120/kernel': 'InceptionResnetV2/Repeat_1/block17_12/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_120/beta': 'InceptionResnetV2/Repeat_1/block17_12/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_120/moving_mean': 'InceptionResnetV2/Repeat_1/block17_12/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_120/moving_variance': 'InceptionResnetV2/Repeat_1/block17_12/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_121/kernel': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_121/beta': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_121/moving_mean': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_121/moving_variance': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_122/kernel': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0b_1x7/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_122/beta': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_122/moving_mean': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_122/moving_variance': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_123/kernel': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0c_7x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_123/beta': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_123/moving_mean': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_123/moving_variance': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_12_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_12/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_12_conv/bias': 'InceptionResnetV2/Repeat_1/block17_12/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_124/kernel': 'InceptionResnetV2/Repeat_1/block17_13/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_124/beta': 'InceptionResnetV2/Repeat_1/block17_13/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_124/moving_mean': 
'InceptionResnetV2/Repeat_1/block17_13/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_124/moving_variance': 'InceptionResnetV2/Repeat_1/block17_13/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_125/kernel': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_125/beta': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_125/moving_mean': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_125/moving_variance': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_126/kernel': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0b_1x7/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_126/beta': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_126/moving_mean': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_126/moving_variance': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_127/kernel': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0c_7x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_127/beta': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_127/moving_mean': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_127/moving_variance': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_13_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_13/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_13_conv/bias': 'InceptionResnetV2/Repeat_1/block17_13/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_128/kernel': 'InceptionResnetV2/Repeat_1/block17_14/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_128/beta': 'InceptionResnetV2/Repeat_1/block17_14/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_128/moving_mean': 'InceptionResnetV2/Repeat_1/block17_14/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_128/moving_variance': 'InceptionResnetV2/Repeat_1/block17_14/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_129/kernel': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_129/beta': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_129/moving_mean': 
'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_129/moving_variance': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_130/kernel': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0b_1x7/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_130/beta': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_130/moving_mean': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_130/moving_variance': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_131/kernel': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0c_7x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_131/beta': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_131/moving_mean': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_131/moving_variance': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_14_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_14/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_14_conv/bias': 'InceptionResnetV2/Repeat_1/block17_14/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_132/kernel': 'InceptionResnetV2/Repeat_1/block17_15/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_132/beta': 'InceptionResnetV2/Repeat_1/block17_15/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_132/moving_mean': 'InceptionResnetV2/Repeat_1/block17_15/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_132/moving_variance': 'InceptionResnetV2/Repeat_1/block17_15/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_133/kernel': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_133/beta': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_133/moving_mean': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_133/moving_variance': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_134/kernel': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0b_1x7/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_134/beta': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_134/moving_mean': 
'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_134/moving_variance': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_135/kernel': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0c_7x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_135/beta': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_135/moving_mean': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_135/moving_variance': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_15_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_15/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_15_conv/bias': 'InceptionResnetV2/Repeat_1/block17_15/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_136/kernel': 'InceptionResnetV2/Repeat_1/block17_16/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_136/beta': 'InceptionResnetV2/Repeat_1/block17_16/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_136/moving_mean': 'InceptionResnetV2/Repeat_1/block17_16/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_136/moving_variance': 'InceptionResnetV2/Repeat_1/block17_16/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_137/kernel': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_137/beta': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_137/moving_mean': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_137/moving_variance': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_138/kernel': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0b_1x7/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_138/beta': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_138/moving_mean': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_138/moving_variance': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_139/kernel': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0c_7x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_139/beta': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_139/moving_mean': 
'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_139/moving_variance': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_16_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_16/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_16_conv/bias': 'InceptionResnetV2/Repeat_1/block17_16/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_140/kernel': 'InceptionResnetV2/Repeat_1/block17_17/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_140/beta': 'InceptionResnetV2/Repeat_1/block17_17/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_140/moving_mean': 'InceptionResnetV2/Repeat_1/block17_17/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_140/moving_variance': 'InceptionResnetV2/Repeat_1/block17_17/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_141/kernel': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_141/beta': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_141/moving_mean': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_141/moving_variance': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_142/kernel': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0b_1x7/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_142/beta': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_142/moving_mean': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_142/moving_variance': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_143/kernel': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0c_7x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_143/beta': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_143/moving_mean': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_143/moving_variance': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_17_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_17/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_17_conv/bias': 'InceptionResnetV2/Repeat_1/block17_17/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_144/kernel': 'InceptionResnetV2/Repeat_1/block17_18/Branch_0/Conv2d_1x1/weights', - 
'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_144/beta': 'InceptionResnetV2/Repeat_1/block17_18/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_144/moving_mean': 'InceptionResnetV2/Repeat_1/block17_18/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_144/moving_variance': 'InceptionResnetV2/Repeat_1/block17_18/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_145/kernel': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_145/beta': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_145/moving_mean': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_145/moving_variance': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_146/kernel': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0b_1x7/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_146/beta': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_146/moving_mean': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_146/moving_variance': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_147/kernel': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0c_7x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_147/beta': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_147/moving_mean': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_147/moving_variance': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_18_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_18/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_18_conv/bias': 'InceptionResnetV2/Repeat_1/block17_18/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_148/kernel': 'InceptionResnetV2/Repeat_1/block17_19/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_148/beta': 'InceptionResnetV2/Repeat_1/block17_19/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_148/moving_mean': 'InceptionResnetV2/Repeat_1/block17_19/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_148/moving_variance': 'InceptionResnetV2/Repeat_1/block17_19/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_149/kernel': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0a_1x1/weights', - 
'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_149/beta': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_149/moving_mean': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_149/moving_variance': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_150/kernel': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0b_1x7/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_150/beta': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_150/moving_mean': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_150/moving_variance': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_151/kernel': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0c_7x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_151/beta': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_151/moving_mean': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_151/moving_variance': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_19_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_19/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_19_conv/bias': 'InceptionResnetV2/Repeat_1/block17_19/Conv2d_1x1/biases', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_152/kernel': 'InceptionResnetV2/Repeat_1/block17_20/Branch_0/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_152/beta': 'InceptionResnetV2/Repeat_1/block17_20/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_152/moving_mean': 'InceptionResnetV2/Repeat_1/block17_20/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_152/moving_variance': 'InceptionResnetV2/Repeat_1/block17_20/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_153/kernel': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0a_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_153/beta': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_153/moving_mean': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_153/moving_variance': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_154/kernel': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0b_1x7/weights', - 
'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_154/beta': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0b_1x7/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_154/moving_mean': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_154/moving_variance': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_155/kernel': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0c_7x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_155/beta': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0c_7x1/BatchNorm/beta', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_155/moving_mean': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean', - 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_155/moving_variance': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_20_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_20/Conv2d_1x1/weights', - 'FirstStageFeatureExtractor/InceptionResnetV2/block17_20_conv/bias': 'InceptionResnetV2/Repeat_1/block17_20/Conv2d_1x1/biases', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_359/kernel': 'InceptionResnetV2/Mixed_7a/Branch_0/Conv2d_0a_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_359/beta': 'InceptionResnetV2/Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_359/moving_mean': 'InceptionResnetV2/Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_359/moving_variance': 'InceptionResnetV2/Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_360/kernel': 'InceptionResnetV2/Mixed_7a/Branch_0/Conv2d_1a_3x3/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_360/beta': 'InceptionResnetV2/Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_360/moving_mean': 'InceptionResnetV2/Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_360/moving_variance': 'InceptionResnetV2/Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_361/kernel': 'InceptionResnetV2/Mixed_7a/Branch_1/Conv2d_0a_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_361/beta': 'InceptionResnetV2/Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_361/moving_mean': 'InceptionResnetV2/Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_361/moving_variance': 'InceptionResnetV2/Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_362/kernel': 'InceptionResnetV2/Mixed_7a/Branch_1/Conv2d_1a_3x3/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_362/beta': 'InceptionResnetV2/Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm/beta', - 
'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_362/moving_mean': 'InceptionResnetV2/Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_362/moving_variance': 'InceptionResnetV2/Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_363/kernel': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_0a_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_363/beta': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_363/moving_mean': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_363/moving_variance': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_364/kernel': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_0b_3x3/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_364/beta': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_364/moving_mean': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_364/moving_variance': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_365/kernel': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_1a_3x3/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_365/beta': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_365/moving_mean': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_365/moving_variance': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_366/kernel': 'InceptionResnetV2/Repeat_2/block8_1/Branch_0/Conv2d_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_366/beta': 'InceptionResnetV2/Repeat_2/block8_1/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_366/moving_mean': 'InceptionResnetV2/Repeat_2/block8_1/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_366/moving_variance': 'InceptionResnetV2/Repeat_2/block8_1/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_367/kernel': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0a_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_367/beta': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_367/moving_mean': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_367/moving_variance': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_368/kernel': 
'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0b_1x3/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_368/beta': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0b_1x3/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_368/moving_mean': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_368/moving_variance': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_369/kernel': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0c_3x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_369/beta': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0c_3x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_369/moving_mean': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_369/moving_variance': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/block8_1_conv/kernel': 'InceptionResnetV2/Repeat_2/block8_1/Conv2d_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/block8_1_conv/bias': 'InceptionResnetV2/Repeat_2/block8_1/Conv2d_1x1/biases', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_370/kernel': 'InceptionResnetV2/Repeat_2/block8_2/Branch_0/Conv2d_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_370/beta': 'InceptionResnetV2/Repeat_2/block8_2/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_370/moving_mean': 'InceptionResnetV2/Repeat_2/block8_2/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_370/moving_variance': 'InceptionResnetV2/Repeat_2/block8_2/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_371/kernel': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0a_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_371/beta': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_371/moving_mean': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_371/moving_variance': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_372/kernel': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0b_1x3/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_372/beta': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0b_1x3/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_372/moving_mean': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_372/moving_variance': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_373/kernel': 
'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0c_3x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_373/beta': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0c_3x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_373/moving_mean': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_373/moving_variance': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/block8_2_conv/kernel': 'InceptionResnetV2/Repeat_2/block8_2/Conv2d_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/block8_2_conv/bias': 'InceptionResnetV2/Repeat_2/block8_2/Conv2d_1x1/biases', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_374/kernel': 'InceptionResnetV2/Repeat_2/block8_3/Branch_0/Conv2d_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_374/beta': 'InceptionResnetV2/Repeat_2/block8_3/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_374/moving_mean': 'InceptionResnetV2/Repeat_2/block8_3/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_374/moving_variance': 'InceptionResnetV2/Repeat_2/block8_3/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_375/kernel': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0a_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_375/beta': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_375/moving_mean': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_375/moving_variance': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_376/kernel': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0b_1x3/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_376/beta': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0b_1x3/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_376/moving_mean': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_376/moving_variance': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_377/kernel': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0c_3x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_377/beta': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0c_3x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_377/moving_mean': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_377/moving_variance': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/block8_3_conv/kernel': 'InceptionResnetV2/Repeat_2/block8_3/Conv2d_1x1/weights', - 
'SecondStageFeatureExtractor/InceptionResnetV2/block8_3_conv/bias': 'InceptionResnetV2/Repeat_2/block8_3/Conv2d_1x1/biases', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_378/kernel': 'InceptionResnetV2/Repeat_2/block8_4/Branch_0/Conv2d_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_378/beta': 'InceptionResnetV2/Repeat_2/block8_4/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_378/moving_mean': 'InceptionResnetV2/Repeat_2/block8_4/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_378/moving_variance': 'InceptionResnetV2/Repeat_2/block8_4/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_379/kernel': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0a_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_379/beta': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_379/moving_mean': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_379/moving_variance': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_380/kernel': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0b_1x3/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_380/beta': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0b_1x3/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_380/moving_mean': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_380/moving_variance': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_381/kernel': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0c_3x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_381/beta': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0c_3x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_381/moving_mean': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_381/moving_variance': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/block8_4_conv/kernel': 'InceptionResnetV2/Repeat_2/block8_4/Conv2d_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/block8_4_conv/bias': 'InceptionResnetV2/Repeat_2/block8_4/Conv2d_1x1/biases', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_382/kernel': 'InceptionResnetV2/Repeat_2/block8_5/Branch_0/Conv2d_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_382/beta': 'InceptionResnetV2/Repeat_2/block8_5/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_382/moving_mean': 'InceptionResnetV2/Repeat_2/block8_5/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_382/moving_variance': 
'InceptionResnetV2/Repeat_2/block8_5/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_383/kernel': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0a_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_383/beta': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_383/moving_mean': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_383/moving_variance': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_384/kernel': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0b_1x3/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_384/beta': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0b_1x3/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_384/moving_mean': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_384/moving_variance': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_385/kernel': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0c_3x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_385/beta': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0c_3x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_385/moving_mean': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_385/moving_variance': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/block8_5_conv/kernel': 'InceptionResnetV2/Repeat_2/block8_5/Conv2d_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/block8_5_conv/bias': 'InceptionResnetV2/Repeat_2/block8_5/Conv2d_1x1/biases', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_386/kernel': 'InceptionResnetV2/Repeat_2/block8_6/Branch_0/Conv2d_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_386/beta': 'InceptionResnetV2/Repeat_2/block8_6/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_386/moving_mean': 'InceptionResnetV2/Repeat_2/block8_6/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_386/moving_variance': 'InceptionResnetV2/Repeat_2/block8_6/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_387/kernel': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0a_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_387/beta': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_387/moving_mean': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_387/moving_variance': 
'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_388/kernel': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0b_1x3/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_388/beta': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0b_1x3/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_388/moving_mean': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_388/moving_variance': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_389/kernel': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0c_3x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_389/beta': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0c_3x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_389/moving_mean': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_389/moving_variance': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/block8_6_conv/kernel': 'InceptionResnetV2/Repeat_2/block8_6/Conv2d_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/block8_6_conv/bias': 'InceptionResnetV2/Repeat_2/block8_6/Conv2d_1x1/biases', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_390/kernel': 'InceptionResnetV2/Repeat_2/block8_7/Branch_0/Conv2d_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_390/beta': 'InceptionResnetV2/Repeat_2/block8_7/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_390/moving_mean': 'InceptionResnetV2/Repeat_2/block8_7/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_390/moving_variance': 'InceptionResnetV2/Repeat_2/block8_7/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_391/kernel': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0a_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_391/beta': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_391/moving_mean': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_391/moving_variance': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_392/kernel': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0b_1x3/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_392/beta': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0b_1x3/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_392/moving_mean': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_392/moving_variance': 
'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_393/kernel': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0c_3x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_393/beta': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0c_3x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_393/moving_mean': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_393/moving_variance': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/block8_7_conv/kernel': 'InceptionResnetV2/Repeat_2/block8_7/Conv2d_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/block8_7_conv/bias': 'InceptionResnetV2/Repeat_2/block8_7/Conv2d_1x1/biases', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_394/kernel': 'InceptionResnetV2/Repeat_2/block8_8/Branch_0/Conv2d_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_394/beta': 'InceptionResnetV2/Repeat_2/block8_8/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_394/moving_mean': 'InceptionResnetV2/Repeat_2/block8_8/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_394/moving_variance': 'InceptionResnetV2/Repeat_2/block8_8/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_395/kernel': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0a_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_395/beta': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_395/moving_mean': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_395/moving_variance': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_396/kernel': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0b_1x3/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_396/beta': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0b_1x3/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_396/moving_mean': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_396/moving_variance': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_397/kernel': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0c_3x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_397/beta': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0c_3x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_397/moving_mean': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_397/moving_variance': 
'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/block8_8_conv/kernel': 'InceptionResnetV2/Repeat_2/block8_8/Conv2d_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/block8_8_conv/bias': 'InceptionResnetV2/Repeat_2/block8_8/Conv2d_1x1/biases', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_398/kernel': 'InceptionResnetV2/Repeat_2/block8_9/Branch_0/Conv2d_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_398/beta': 'InceptionResnetV2/Repeat_2/block8_9/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_398/moving_mean': 'InceptionResnetV2/Repeat_2/block8_9/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_398/moving_variance': 'InceptionResnetV2/Repeat_2/block8_9/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_399/kernel': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0a_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_399/beta': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_399/moving_mean': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_399/moving_variance': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_400/kernel': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0b_1x3/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_400/beta': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0b_1x3/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_400/moving_mean': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_400/moving_variance': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_401/kernel': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0c_3x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_401/beta': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0c_3x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_401/moving_mean': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_401/moving_variance': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/block8_9_conv/kernel': 'InceptionResnetV2/Repeat_2/block8_9/Conv2d_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/block8_9_conv/bias': 'InceptionResnetV2/Repeat_2/block8_9/Conv2d_1x1/biases', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_402/kernel': 'InceptionResnetV2/Block8/Branch_0/Conv2d_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_402/beta': 'InceptionResnetV2/Block8/Branch_0/Conv2d_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_402/moving_mean': 
'InceptionResnetV2/Block8/Branch_0/Conv2d_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_402/moving_variance': 'InceptionResnetV2/Block8/Branch_0/Conv2d_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_403/kernel': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0a_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_403/beta': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0a_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_403/moving_mean': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_403/moving_variance': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_404/kernel': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0b_1x3/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_404/beta': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0b_1x3/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_404/moving_mean': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_404/moving_variance': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_405/kernel': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0c_3x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_405/beta': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0c_3x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_405/moving_mean': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_405/moving_variance': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance', - 'SecondStageFeatureExtractor/InceptionResnetV2/block8_10_conv/kernel': 'InceptionResnetV2/Block8/Conv2d_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/block8_10_conv/bias': 'InceptionResnetV2/Block8/Conv2d_1x1/biases', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv_7b/kernel': 'InceptionResnetV2/Conv2d_7b_1x1/weights', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv_7b_bn/beta': 'InceptionResnetV2/Conv2d_7b_1x1/BatchNorm/beta', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv_7b_bn/moving_mean': 'InceptionResnetV2/Conv2d_7b_1x1/BatchNorm/moving_mean', - 'SecondStageFeatureExtractor/InceptionResnetV2/conv_7b_bn/moving_variance': 'InceptionResnetV2/Conv2d_7b_1x1/BatchNorm/moving_variance', - } - - variables_to_restore = {} - if tf.executing_eagerly(): - for key in self._variable_dict: - # variable.name includes ":0" at the end, but the names in the - # checkpoint do not have the suffix ":0". So, we strip it here. 
- var_name = keras_to_slim_name_mapping.get(key) - if var_name: - variables_to_restore[var_name] = self._variable_dict[key] - else: - for variable in variables_helper.get_global_variables_safely(): - var_name = keras_to_slim_name_mapping.get(variable.op.name) - if var_name: - variables_to_restore[var_name] = variable - return variables_to_restore diff --git a/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor_tf2_test.py b/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor_tf2_test.py index 49c560457..20bb50ef8 100644 --- a/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor_tf2_test.py +++ b/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor_tf2_test.py @@ -73,7 +73,7 @@ class FasterRcnnInceptionResnetV2KerasFeatureExtractorTest(tf.test.TestCase): proposal_classifier_features = ( model(proposal_feature_maps)) features_shape = tf.shape(proposal_classifier_features) - self.assertAllEqual(features_shape.numpy(), [2, 8, 8, 1536]) + self.assertAllEqual(features_shape.numpy(), [2, 9, 9, 1536]) if __name__ == '__main__': diff --git a/research/object_detection/models/faster_rcnn_resnet_keras_feature_extractor.py b/research/object_detection/models/faster_rcnn_resnet_keras_feature_extractor.py index a2029d242..a6b1e2540 100644 --- a/research/object_detection/models/faster_rcnn_resnet_keras_feature_extractor.py +++ b/research/object_detection/models/faster_rcnn_resnet_keras_feature_extractor.py @@ -175,23 +175,6 @@ class FasterRCNNResnetKerasFeatureExtractor( self._variable_dict[variable.name[:-2]] = variable return keras_model - def restore_from_classification_checkpoint_fn( - self, - first_stage_feature_extractor_scope, - second_stage_feature_extractor_scope): - """Returns a map for restoring from an (object-based) checkpoint. - - Args: - first_stage_feature_extractor_scope: A scope name for the first stage - feature extractor (unused). - second_stage_feature_extractor_scope: A scope name for the second stage - feature extractor (unused). - - Returns: - A dict mapping keys to Keras models - """ - return {'feature_extractor': self.classification_backbone} - class FasterRCNNResnet50KerasFeatureExtractor( FasterRCNNResnetKerasFeatureExtractor): diff --git a/research/object_detection/models/ssd_mobilenet_v1_keras_feature_extractor.py b/research/object_detection/models/ssd_mobilenet_v1_keras_feature_extractor.py index 82b48c1a8..2f0df9154 100644 --- a/research/object_detection/models/ssd_mobilenet_v1_keras_feature_extractor.py +++ b/research/object_detection/models/ssd_mobilenet_v1_keras_feature_extractor.py @@ -163,14 +163,3 @@ class SSDMobileNetV1KerasFeatureExtractor( 'Conv2d_13_pointwise': image_features[1]}) return list(feature_maps.values()) - - def restore_from_classification_checkpoint_fn(self, feature_extractor_scope): - """Returns a map for restoring from an (object-based) checkpoint. - - Args: - feature_extractor_scope: A scope name for the feature extractor (unused). 
- - Returns: - A dict mapping keys to Keras models - """ - return {'feature_extractor': self.classification_backbone} diff --git a/research/object_detection/models/ssd_mobilenet_v2_fpn_keras_feature_extractor.py b/research/object_detection/models/ssd_mobilenet_v2_fpn_keras_feature_extractor.py index 0e36e8bda..0834ea6b9 100644 --- a/research/object_detection/models/ssd_mobilenet_v2_fpn_keras_feature_extractor.py +++ b/research/object_detection/models/ssd_mobilenet_v2_fpn_keras_feature_extractor.py @@ -241,14 +241,3 @@ class SSDMobileNetV2FpnKerasFeatureExtractor( last_feature_map = layer(last_feature_map) feature_maps.append(last_feature_map) return feature_maps - - def restore_from_classification_checkpoint_fn(self, feature_extractor_scope): - """Returns a map for restoring from an (object-based) checkpoint. - - Args: - feature_extractor_scope: A scope name for the feature extractor (unused). - - Returns: - A dict mapping keys to Keras models - """ - return {'feature_extractor': self.classification_backbone} diff --git a/research/object_detection/models/ssd_mobilenet_v2_keras_feature_extractor.py b/research/object_detection/models/ssd_mobilenet_v2_keras_feature_extractor.py index 9f0622f32..0f79fc271 100644 --- a/research/object_detection/models/ssd_mobilenet_v2_keras_feature_extractor.py +++ b/research/object_detection/models/ssd_mobilenet_v2_keras_feature_extractor.py @@ -166,14 +166,3 @@ class SSDMobileNetV2KerasFeatureExtractor( 'layer_19': image_features[1]}) return list(feature_maps.values()) - - def restore_from_classification_checkpoint_fn(self, feature_extractor_scope): - """Returns a map for restoring from an (object-based) checkpoint. - - Args: - feature_extractor_scope: A scope name for the feature extractor (unused). - - Returns: - A dict mapping keys to Keras models - """ - return {'feature_extractor': self.classification_backbone} diff --git a/research/object_detection/models/ssd_resnet_v1_fpn_keras_feature_extractor.py b/research/object_detection/models/ssd_resnet_v1_fpn_keras_feature_extractor.py index 6de9ae3e5..0ac929cc6 100644 --- a/research/object_detection/models/ssd_resnet_v1_fpn_keras_feature_extractor.py +++ b/research/object_detection/models/ssd_resnet_v1_fpn_keras_feature_extractor.py @@ -246,17 +246,6 @@ class SSDResNetV1FpnKerasFeatureExtractor( feature_maps.append(last_feature_map) return feature_maps - def restore_from_classification_checkpoint_fn(self, feature_extractor_scope): - """Returns a map for restoring from an (object-based) checkpoint. - - Args: - feature_extractor_scope: A scope name for the feature extractor (unused). 
- - Returns: - A dict mapping keys to Keras models - """ - return {'feature_extractor': self.classification_backbone} - class SSDResNet50V1FpnKerasFeatureExtractor( SSDResNetV1FpnKerasFeatureExtractor): diff --git a/research/object_detection/predictors/convolutional_keras_box_predictor.py b/research/object_detection/predictors/convolutional_keras_box_predictor.py index 630c68039..fc72fb04c 100644 --- a/research/object_detection/predictors/convolutional_keras_box_predictor.py +++ b/research/object_detection/predictors/convolutional_keras_box_predictor.py @@ -314,7 +314,8 @@ class WeightSharedConvolutionalBoxPredictor(box_predictor.KerasBoxPredictor): self, inserted_layer_counter, target_channel): projection_layers = [] if inserted_layer_counter >= 0: - use_bias = False if self._apply_batch_norm else True + use_bias = False if (self._apply_batch_norm and not + self._conv_hyperparams.force_use_bias()) else True projection_layers.append(keras.Conv2D( target_channel, [1, 1], strides=1, padding='SAME', name='ProjectionLayer/conv2d_{}'.format(inserted_layer_counter), @@ -331,7 +332,8 @@ class WeightSharedConvolutionalBoxPredictor(box_predictor.KerasBoxPredictor): conv_layers = [] batch_norm_layers = [] activation_layers = [] - use_bias = False if self._apply_batch_norm else True + use_bias = False if (self._apply_batch_norm and not + self._conv_hyperparams.force_use_bias()) else True for additional_conv_layer_idx in range(self._num_layers_before_predictor): layer_name = '{}/conv2d_{}'.format( tower_name_scope, additional_conv_layer_idx) @@ -363,7 +365,9 @@ class WeightSharedConvolutionalBoxPredictor(box_predictor.KerasBoxPredictor): training=(self._is_training and not self._freeze_batchnorm), name='{}/conv2d_{}/BatchNorm/feature_{}'.format( tower_name_scope, additional_conv_layer_idx, feature_index))) - activation_layers.append(tf.keras.layers.Lambda(tf.nn.relu6)) + activation_layers.append(self._conv_hyperparams.build_activation_layer( + name='{}/conv2d_{}/activation_{}'.format( + tower_name_scope, additional_conv_layer_idx, feature_index))) # Set conv layers as the shared conv layers for different feature maps with # the same tower_name_scope. diff --git a/research/object_detection/protos/input_reader.proto b/research/object_detection/protos/input_reader.proto index 2d9deda11..27d022532 100644 --- a/research/object_detection/protos/input_reader.proto +++ b/research/object_detection/protos/input_reader.proto @@ -31,7 +31,7 @@ enum InputType { TF_SEQUENCE_EXAMPLE = 2; // TfSequenceExample Input } -// Next id: 31 +// Next id: 32 message InputReader { // Name of input reader. Typically used to describe the dataset that is read // by this input reader. @@ -119,6 +119,10 @@ message InputReader { // Type of instance mask. optional InstanceMaskType mask_type = 10 [default = NUMERICAL_MASKS]; + // Whether to load DensePose data. If set, must also set load_instance_masks + // to true. + optional bool load_dense_pose = 31 [default = false]; + // Whether to use the display name when decoding examples. This is only used // when mapping class text strings to integers. optional bool use_display_name = 17 [default = false]; diff --git a/research/object_detection/protos/train.proto b/research/object_detection/protos/train.proto index 0da8b2ede..62d326cdf 100644 --- a/research/object_detection/protos/train.proto +++ b/research/object_detection/protos/train.proto @@ -59,7 +59,8 @@ message TrainConfig { // Whether to load all checkpoint vars that match model variable names and // sizes. 
This option is only available if `from_detection_checkpoint` is - // True. + // True. This option is *not* supported for TF2 --- setting it to true + // will raise an error. optional bool load_all_detection_checkpoint_vars = 19 [default = false]; // Number of steps to train the DetectionModel for. If 0, will train the model diff --git a/research/object_detection/samples/configs/context_rcnn_resnet101_snapshot_serengeti.config b/research/object_detection/samples/configs/context_rcnn_resnet101_snapshot_serengeti.config new file mode 100644 index 000000000..8167731c7 --- /dev/null +++ b/research/object_detection/samples/configs/context_rcnn_resnet101_snapshot_serengeti.config @@ -0,0 +1,164 @@ +# Context R-CNN configuration for Snapshot Serengeti Dataset, with sequence +# example input data with context_features. +# This model uses attention into contextual features within the Faster R-CNN +# object detection framework to improve object detection performance. +# See https://arxiv.org/abs/1912.03538 for more information. +# Search for "PATH_TO_BE_CONFIGURED" to find the fields that should be +# configured. + +model { + faster_rcnn { + num_classes: 48 + image_resizer { + fixed_shape_resizer { + height: 640 + width: 640 + } + } + feature_extractor { + type: "faster_rcnn_resnet101" + first_stage_features_stride: 16 + batch_norm_trainable: true + } + first_stage_anchor_generator { + grid_anchor_generator { + height_stride: 16 + width_stride: 16 + scales: 0.25 + scales: 0.5 + scales: 1.0 + scales: 2.0 + aspect_ratios: 0.5 + aspect_ratios: 1.0 + aspect_ratios: 2.0 + } + } + first_stage_box_predictor_conv_hyperparams { + op: CONV + regularizer { + l2_regularizer { + weight: 0.0 + } + } + initializer { + truncated_normal_initializer { + stddev: 0.00999999977648 + } + } + } + first_stage_nms_score_threshold: 0.0 + first_stage_nms_iou_threshold: 0.699999988079 + first_stage_max_proposals: 300 + first_stage_localization_loss_weight: 2.0 + first_stage_objectness_loss_weight: 1.0 + initial_crop_size: 14 + maxpool_kernel_size: 2 + maxpool_stride: 2 + second_stage_box_predictor { + mask_rcnn_box_predictor { + fc_hyperparams { + op: FC + regularizer { + l2_regularizer { + weight: 0.0 + } + } + initializer { + variance_scaling_initializer { + factor: 1.0 + uniform: true + mode: FAN_AVG + } + } + } + use_dropout: false + dropout_keep_probability: 1.0 + share_box_across_classes: true + } + } + second_stage_post_processing { + batch_non_max_suppression { + score_threshold: 0.0 + iou_threshold: 0.600000023842 + max_detections_per_class: 100 + max_total_detections: 300 + } + score_converter: SOFTMAX + } + second_stage_localization_loss_weight: 2.0 + second_stage_classification_loss_weight: 1.0 + use_matmul_crop_and_resize: true + clip_anchors_to_image: true + use_matmul_gather_in_matcher: true + use_static_balanced_label_sampler: true + use_static_shapes: true + context_config { + max_num_context_features: 2000 + context_feature_length: 2057 + } + } +} +train_config { + batch_size: 8 + data_augmentation_options { + random_horizontal_flip { + } + } + sync_replicas: true + optimizer { + momentum_optimizer { + learning_rate { + manual_step_learning_rate { + initial_learning_rate: 0.0 + schedule { + step: 400000 + learning_rate: 0.002 + } + schedule { + step: 500000 + learning_rate: 0.0002 + } + schedule { + step: 600000 + learning_rate: 0.00002 + } + warmup: true + } + } + momentum_optimizer_value: 0.9 + } + use_moving_average: false + } + gradient_clipping_by_norm: 10.0 + fine_tune_checkpoint: 
"PATH_TO_BE_CONFIGURED/faster_rcnn_resnet101_coco_2018_08_14/model.ckpt" + from_detection_checkpoint: true + num_steps: 5000000 + replicas_to_aggregate: 8 + max_number_of_boxes: 100 + unpad_groundtruth_tensors: false + use_bfloat16: true +} +train_input_reader { + label_map_path: "PATH_TO_BE_CONFIGURED/ss_label_map.pbtxt" + tf_record_input_reader { + input_path: "PATH_TO_BE_CONFIGURED/snapshot_serengeti_train-?????-of-?????" + } + load_context_features: true + input_type: TF_SEQUENCE_EXAMPLE +} +eval_config { + max_evals: 50 + metrics_set: "coco_detection_metrics" + use_moving_averages: false + batch_size: 1 +} +eval_input_reader { + label_map_path: "PATH_TO_BE_CONFIGURED/ss_label_map.pbtxt" + shuffle: false + num_epochs: 1 + tf_record_input_reader { + input_path: "PATH_TO_BE_CONFIGURED/snapshot_serengeti_val-?????-of-?????" + } + load_context_features: true + input_type: TF_SEQUENCE_EXAMPLE +} diff --git a/research/object_detection/utils/bifpn_utils.py b/research/object_detection/utils/bifpn_utils.py index b4b244355..d14cb841e 100644 --- a/research/object_detection/utils/bifpn_utils.py +++ b/research/object_detection/utils/bifpn_utils.py @@ -26,7 +26,8 @@ from object_detection.utils import shape_utils def create_conv_block(name, num_filters, kernel_size, strides, padding, use_separable, apply_batchnorm, apply_activation, - conv_hyperparams, is_training, freeze_batchnorm): + conv_hyperparams, is_training, freeze_batchnorm, + conv_bn_act_pattern=True): """Create Keras layers for regular or separable convolutions. Args: @@ -50,6 +51,9 @@ def create_conv_block(name, num_filters, kernel_size, strides, padding, training or not. When training with a small batch size (e.g. 1), it is desirable to freeze batch norm update and use pretrained batch norm params. + conv_bn_act_pattern: Bool. By default, when True, the layers returned by + this function are in the order [conv, batchnorm, activation]. Otherwise, + when False, the order of the layers is [activation, conv, batchnorm]. 
Returns: A list of keras layers, including (regular or seperable) convolution, and @@ -73,7 +77,7 @@ def create_conv_block(name, num_filters, kernel_size, strides, padding, depth_multiplier=1, padding=padding, strides=strides, - name=name + '_separable_conv', + name=name + 'separable_conv', **kwargs)) else: layers.append( @@ -82,18 +86,22 @@ def create_conv_block(name, num_filters, kernel_size, strides, padding, kernel_size=kernel_size, padding=padding, strides=strides, - name=name + '_conv', + name=name + 'conv', **conv_hyperparams.params())) if apply_batchnorm: layers.append( conv_hyperparams.build_batch_norm( training=(is_training and not freeze_batchnorm), - name=name + '_batchnorm')) + name=name + 'batchnorm')) if apply_activation: - layers.append( - conv_hyperparams.build_activation_layer(name=name + '_activation')) + activation_layer = conv_hyperparams.build_activation_layer( + name=name + 'activation') + if conv_bn_act_pattern: + layers.append(activation_layer) + else: + layers = [activation_layer] + layers return layers @@ -133,28 +141,28 @@ def create_downsample_feature_map_ops(scale, downsample_method, pool_size=kernel_size, strides=stride, padding=padding, - name=name + '_downsample_max_x{}'.format(stride))) + name=name + 'downsample_max_x{}'.format(stride))) elif downsample_method == 'avg_pooling': layers.append( tf.keras.layers.AveragePooling2D( pool_size=kernel_size, strides=stride, padding=padding, - name=name + '_downsample_avg_x{}'.format(stride))) + name=name + 'downsample_avg_x{}'.format(stride))) elif downsample_method == 'depthwise_conv': layers.append( tf.keras.layers.DepthwiseConv2D( kernel_size=kernel_size, strides=stride, padding=padding, - name=name + '_downsample_depthwise_x{}'.format(stride))) + name=name + 'downsample_depthwise_x{}'.format(stride))) layers.append( conv_hyperparams.build_batch_norm( training=(is_training and not freeze_batchnorm), - name=name + '_downsample_batchnorm')) + name=name + 'downsample_batchnorm')) layers.append( conv_hyperparams.build_activation_layer(name=name + - '_downsample_activation')) + 'downsample_activation')) else: raise ValueError('Unknown downsample method: {}'.format(downsample_method)) diff --git a/research/object_detection/utils/config_util.py b/research/object_detection/utils/config_util.py index 8dd405dff..662d42e13 100644 --- a/research/object_detection/utils/config_util.py +++ b/research/object_detection/utils/config_util.py @@ -147,6 +147,7 @@ def clear_fine_tune_checkpoint(pipeline_config_path, """Clears fine_tune_checkpoint and writes a new pipeline config file.""" configs = get_configs_from_pipeline_file(pipeline_config_path) configs["train_config"].fine_tune_checkpoint = "" + configs["train_config"].load_all_detection_checkpoint_vars = False pipeline_proto = create_pipeline_proto_from_configs(configs) with tf.gfile.Open(new_pipeline_config_path, "wb") as f: f.write(text_format.MessageToString(pipeline_proto)) -- GitLab From a555f1b07bf5c30a20a86e9420f5a1e372116aa7 Mon Sep 17 00:00:00 2001 From: vivek rathod Date: Fri, 26 Jun 2020 10:45:37 -0700 Subject: [PATCH 66/79] Merged commit includes the following changes: (#8740) 318497061 by rathodv: 1. Replace strategy.run() with strategy.experimental_run_v2() and replace tensor.ref() with tensor.experimental_ref() to be compatible with TF2.1 runtime on cloud. 2. Fix expected string in failing PY3 tests. -- 318493408 by aom: Implements "Bidirectional Feature Pyramid Network Generators" for BiFPN-based feature extractors (e.g. EfficientDet). 
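For reference, the dispatch pattern that first change targets looks roughly like the sketch below. This is a minimal illustration assuming a generic Keras model, optimizer, and MirroredStrategy on a TF 2.1 runtime; it is not code taken from this patch.

import tensorflow as tf

strategy = tf.distribute.MirroredStrategy()

with strategy.scope():
  model = tf.keras.Sequential([tf.keras.layers.Dense(10)])
  optimizer = tf.keras.optimizers.SGD(learning_rate=0.01)


def train_step(features, labels):
  """Per-replica step: forward pass, loss scaling, and gradient update."""
  with tf.GradientTape() as tape:
    outputs = model(features, training=True)
    loss = tf.reduce_mean(tf.keras.losses.mean_squared_error(labels, outputs))
    # Scale by the replica count so the cross-replica sum equals the mean.
    loss = loss / strategy.num_replicas_in_sync
  grads = tape.gradient(loss, model.trainable_variables)
  optimizer.apply_gradients(zip(grads, model.trainable_variables))
  return loss


@tf.function
def distributed_train_step(features, labels):
  # On a TF 2.1 runtime the per-replica call is experimental_run_v2; newer
  # releases expose the same behavior as strategy.run.
  per_replica_losses = strategy.experimental_run_v2(
      train_step, args=(features, labels))
  return strategy.reduce(
      tf.distribute.ReduceOp.SUM, per_replica_losses, axis=None)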
-- PiperOrigin-RevId: 318497061 --- .../builders/dataset_builder_test.py | 2 +- .../add_context_to_examples_tf1_test.py | 2 +- ...eate_cococameratraps_tfexample_tf1_test.py | 12 +- .../generate_embedding_data_tf1_test.py | 10 +- .../dataset_tools/seq_example_util_test.py | 4 +- .../tf_record_creation_util_test.py | 2 +- .../dockerfiles/{1.15 => tf1}/Dockerfile | 0 .../dockerfiles/{1.15 => tf1}/README.md | 0 .../dockerfiles/{2.2 => tf2}/Dockerfile | 0 .../dockerfiles/{2.2 => tf2}/README.md | 0 .../export_tflite_ssd_graph_lib_tf1_test.py | 2 +- research/object_detection/model_lib_v2.py | 4 +- ...idirectional_feature_pyramid_generators.py | 486 ++++++++++++++++++ ...nal_feature_pyramid_generators_tf2_test.py | 167 ++++++ .../models/keras_models/hourglass_network.py | 15 +- research/object_detection/utils/model_util.py | 29 +- 16 files changed, 699 insertions(+), 36 deletions(-) rename research/object_detection/dockerfiles/{1.15 => tf1}/Dockerfile (100%) rename research/object_detection/dockerfiles/{1.15 => tf1}/README.md (100%) rename research/object_detection/dockerfiles/{2.2 => tf2}/Dockerfile (100%) rename research/object_detection/dockerfiles/{2.2 => tf2}/README.md (100%) create mode 100644 research/object_detection/models/bidirectional_feature_pyramid_generators.py create mode 100644 research/object_detection/models/bidirectional_feature_pyramid_generators_tf2_test.py diff --git a/research/object_detection/builders/dataset_builder_test.py b/research/object_detection/builders/dataset_builder_test.py index 7c3de113e..eb2cdb3cc 100644 --- a/research/object_detection/builders/dataset_builder_test.py +++ b/research/object_detection/builders/dataset_builder_test.py @@ -390,7 +390,7 @@ class DatasetBuilderTest(test_case.TestCase): return iter1.get_next(), iter2.get_next() output_dict1, output_dict2 = self.execute(graph_fn, []) - self.assertAllEqual(['0'], output_dict1[fields.InputDataFields.source_id]) + self.assertAllEqual([b'0'], output_dict1[fields.InputDataFields.source_id]) self.assertEqual([b'1'], output_dict2[fields.InputDataFields.source_id]) def test_sample_one_of_n_shards(self): diff --git a/research/object_detection/dataset_tools/context_rcnn/add_context_to_examples_tf1_test.py b/research/object_detection/dataset_tools/context_rcnn/add_context_to_examples_tf1_test.py index 99bb47979..0f10fa776 100644 --- a/research/object_detection/dataset_tools/context_rcnn/add_context_to_examples_tf1_test.py +++ b/research/object_detection/dataset_tools/context_rcnn/add_context_to_examples_tf1_test.py @@ -200,7 +200,7 @@ class GenerateContextDataTest(tf.test.TestCase): seq_feature_dict['region/label/string'].feature[1].bytes_list.value[:]) def assert_expected_key(self, key): - self.assertAllEqual(key, '01') + self.assertAllEqual(key, b'01') def assert_sorted(self, example_collection): example_list = list(example_collection) diff --git a/research/object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_tf1_test.py b/research/object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_tf1_test.py index be6dc0dc4..3f3569e13 100644 --- a/research/object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_tf1_test.py +++ b/research/object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_tf1_test.py @@ -95,13 +95,13 @@ class CreateCOCOCameraTrapsTfexampleTest(tf.test.TestCase): .int64_list.value, [1]) self.assertAllEqual( example.features.feature['image/object/class/text'] - .bytes_list.value, ['animal']) + .bytes_list.value, 
[b'animal']) self.assertAllClose( example.features.feature['image/class/label'] .int64_list.value, [1]) self.assertAllEqual( example.features.feature['image/class/text'] - .bytes_list.value, ['animal']) + .bytes_list.value, [b'animal']) # Check other essential attributes. self.assertAllEqual( @@ -112,7 +112,7 @@ class CreateCOCOCameraTrapsTfexampleTest(tf.test.TestCase): [self.IMAGE_WIDTH]) self.assertAllEqual( example.features.feature['image/source_id'].bytes_list.value, - ['im_0']) + [b'im_0']) self.assertTrue( example.features.feature['image/encoded'].bytes_list.value) @@ -134,13 +134,13 @@ class CreateCOCOCameraTrapsTfexampleTest(tf.test.TestCase): .int64_list.value, [1]) self.assertAllEqual( example.features.feature['image/object/class/text'] - .bytes_list.value, ['animal']) + .bytes_list.value, [b'animal']) self.assertAllClose( example.features.feature['image/class/label'] .int64_list.value, [1]) self.assertAllEqual( example.features.feature['image/class/text'] - .bytes_list.value, ['animal']) + .bytes_list.value, [b'animal']) # Check other essential attributes. self.assertAllEqual( @@ -151,7 +151,7 @@ class CreateCOCOCameraTrapsTfexampleTest(tf.test.TestCase): [self.IMAGE_WIDTH]) self.assertAllEqual( example.features.feature['image/source_id'].bytes_list.value, - ['im_0']) + [b'im_0']) self.assertTrue( example.features.feature['image/encoded'].bytes_list.value) diff --git a/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data_tf1_test.py b/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data_tf1_test.py index 6409399eb..836bd59fb 100644 --- a/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data_tf1_test.py +++ b/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data_tf1_test.py @@ -239,13 +239,13 @@ class GenerateEmbeddingData(tf.test.TestCase): .int64_list.value, [5]) self.assertAllEqual( example.features.feature['image/object/class/text'] - .bytes_list.value, ['hyena']) + .bytes_list.value, [b'hyena']) self.assertAllClose( example.features.feature['image/class/label'] .int64_list.value, [5]) self.assertAllEqual( example.features.feature['image/class/text'] - .bytes_list.value, ['hyena']) + .bytes_list.value, [b'hyena']) # Check other essential attributes. 
self.assertAllEqual( @@ -254,7 +254,7 @@ class GenerateEmbeddingData(tf.test.TestCase): example.features.feature['image/width'].int64_list.value, [600]) self.assertAllEqual( example.features.feature['image/source_id'].bytes_list.value, - ['image_id']) + [b'image_id']) self.assertTrue( example.features.feature['image/encoded'].bytes_list.value) @@ -271,7 +271,7 @@ class GenerateEmbeddingData(tf.test.TestCase): .int64_list.value, [5]) self.assertAllEqual(tf.train.Example.FromString( generated_example).features.feature['image/object/class/text'] - .bytes_list.value, ['hyena']) + .bytes_list.value, [b'hyena']) output = inference_fn.process(generated_example) output_example = output[0] self.assert_expected_example(output_example) @@ -307,7 +307,7 @@ class GenerateEmbeddingData(tf.test.TestCase): .feature['image/object/class/label'].int64_list.value, [5]) self.assertAllEqual( tf.train.Example.FromString(generated_example).features - .feature['image/object/class/text'].bytes_list.value, ['hyena']) + .feature['image/object/class/text'].bytes_list.value, [b'hyena']) output = inference_fn.process(generated_example) output_example = output[0] self.assert_expected_example(output_example, botk=True) diff --git a/research/object_detection/dataset_tools/seq_example_util_test.py b/research/object_detection/dataset_tools/seq_example_util_test.py index ba898d735..fd721954b 100644 --- a/research/object_detection/dataset_tools/seq_example_util_test.py +++ b/research/object_detection/dataset_tools/seq_example_util_test.py @@ -288,7 +288,7 @@ class SeqExampleUtilTest(tf.test.TestCase): [0.75, 1.], seq_feature_dict['region/bbox/xmax'].feature[0].float_list.value[:]) self.assertAllEqual( - ['cat', 'frog'], + [b'cat', b'frog'], seq_feature_dict['region/label/string'].feature[0].bytes_list.value[:]) self.assertAllClose( [0.], @@ -332,7 +332,7 @@ class SeqExampleUtilTest(tf.test.TestCase): [0.75], seq_feature_dict['region/bbox/xmax'].feature[1].float_list.value[:]) self.assertAllEqual( - ['cat'], + [b'cat'], seq_feature_dict['region/label/string'].feature[1].bytes_list.value[:]) self.assertAllClose( [], diff --git a/research/object_detection/dataset_tools/tf_record_creation_util_test.py b/research/object_detection/dataset_tools/tf_record_creation_util_test.py index 2873a6d14..5722c8647 100644 --- a/research/object_detection/dataset_tools/tf_record_creation_util_test.py +++ b/research/object_detection/dataset_tools/tf_record_creation_util_test.py @@ -42,7 +42,7 @@ class OpenOutputTfrecordsTests(tf.test.TestCase): tf_record_path = '{}-{:05d}-of-00010'.format( os.path.join(tf.test.get_temp_dir(), 'test.tfrec'), idx) records = list(tf.python_io.tf_record_iterator(tf_record_path)) - self.assertAllEqual(records, ['test_{}'.format(idx)]) + self.assertAllEqual(records, ['test_{}'.format(idx).encode('utf-8')]) if __name__ == '__main__': diff --git a/research/object_detection/dockerfiles/1.15/Dockerfile b/research/object_detection/dockerfiles/tf1/Dockerfile similarity index 100% rename from research/object_detection/dockerfiles/1.15/Dockerfile rename to research/object_detection/dockerfiles/tf1/Dockerfile diff --git a/research/object_detection/dockerfiles/1.15/README.md b/research/object_detection/dockerfiles/tf1/README.md similarity index 100% rename from research/object_detection/dockerfiles/1.15/README.md rename to research/object_detection/dockerfiles/tf1/README.md diff --git a/research/object_detection/dockerfiles/2.2/Dockerfile b/research/object_detection/dockerfiles/tf2/Dockerfile similarity index 100% rename from 
research/object_detection/dockerfiles/2.2/Dockerfile rename to research/object_detection/dockerfiles/tf2/Dockerfile diff --git a/research/object_detection/dockerfiles/2.2/README.md b/research/object_detection/dockerfiles/tf2/README.md similarity index 100% rename from research/object_detection/dockerfiles/2.2/README.md rename to research/object_detection/dockerfiles/tf2/README.md diff --git a/research/object_detection/export_tflite_ssd_graph_lib_tf1_test.py b/research/object_detection/export_tflite_ssd_graph_lib_tf1_test.py index 721d2988a..3625b9f65 100644 --- a/research/object_detection/export_tflite_ssd_graph_lib_tf1_test.py +++ b/research/object_detection/export_tflite_ssd_graph_lib_tf1_test.py @@ -419,7 +419,7 @@ class ExportTfliteGraphTest(tf.test.TestCase): tflite_graph_file = self._export_graph_with_postprocessing_op( pipeline_config) self.assertTrue(os.path.exists(tflite_graph_file)) - mock_get.assert_called_once() + self.assertEqual(1, mock_get.call_count) if __name__ == '__main__': diff --git a/research/object_detection/model_lib_v2.py b/research/object_detection/model_lib_v2.py index d004d5354..2f360a873 100644 --- a/research/object_detection/model_lib_v2.py +++ b/research/object_detection/model_lib_v2.py @@ -336,7 +336,7 @@ def load_fine_tune_checkpoint( labels) strategy = tf.compat.v2.distribute.get_strategy() - strategy.run( + strategy.experimental_run_v2( _dummy_computation_fn, args=( features, labels, @@ -562,7 +562,7 @@ def train_loop( def _sample_and_train(strategy, train_step_fn, data_iterator): features, labels = data_iterator.next() - per_replica_losses = strategy.run( + per_replica_losses = strategy.experimental_run_v2( train_step_fn, args=(features, labels)) # TODO(anjalisridhar): explore if it is safe to remove the ## num_replicas scaling of the loss and switch this to a ReduceOp.Mean diff --git a/research/object_detection/models/bidirectional_feature_pyramid_generators.py b/research/object_detection/models/bidirectional_feature_pyramid_generators.py new file mode 100644 index 000000000..b53dc60ef --- /dev/null +++ b/research/object_detection/models/bidirectional_feature_pyramid_generators.py @@ -0,0 +1,486 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Functions to generate bidirectional feature pyramids based on image features. + +Provides bidirectional feature pyramid network (BiFPN) generators that can be +used to build object detection feature extractors, as proposed by Tan et al. +See https://arxiv.org/abs/1911.09070 for more details. +""" +import collections +import functools +from six.moves import range +from six.moves import zip +import tensorflow as tf + +from object_detection.utils import bifpn_utils + + +def _create_bifpn_input_config(fpn_min_level, + fpn_max_level, + input_max_level, + level_scales=None): + """Creates a BiFPN input config for the input levels from a backbone network. 
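+
+  For example (illustrative values, not from the original patch): with
+  fpn_min_level=3, fpn_max_level=7 and input_max_level=5, this returns three
+  entries named '0_up_lvl_3', '0_up_lvl_4' and '0_up_lvl_5', with scales
+  8, 16 and 32.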
+ + Args: + fpn_min_level: the minimum pyramid level (highest feature map resolution) to + use in the BiFPN. + fpn_max_level: the maximum pyramid level (lowest feature map resolution) to + use in the BiFPN. + input_max_level: the maximum pyramid level that will be provided as input to + the BiFPN. Accordingly, the BiFPN will compute additional pyramid levels + from input_max_level, up to the desired fpn_max_level. + level_scales: a list of pyramid level scale factors. If 'None', each level's + scale is set to 2^level by default, which corresponds to each successive + feature map scaling by a factor of 2. + + Returns: + A list of dictionaries for each feature map expected as input to the BiFPN, + where each has entries for the feature map 'name' and 'scale'. + """ + if not level_scales: + level_scales = [2**i for i in range(fpn_min_level, fpn_max_level + 1)] + + bifpn_input_params = [] + for i in range(fpn_min_level, min(fpn_max_level, input_max_level) + 1): + bifpn_input_params.append({ + 'name': '0_up_lvl_{}'.format(i), + 'scale': level_scales[i - fpn_min_level] + }) + + return bifpn_input_params + + +def _get_bifpn_output_node_names(fpn_min_level, fpn_max_level, node_config): + """Returns a list of BiFPN output node names, given a BiFPN node config. + + Args: + fpn_min_level: the minimum pyramid level (highest feature map resolution) + used by the BiFPN. + fpn_max_level: the maximum pyramid level (lowest feature map resolution) + used by the BiFPN. + node_config: the BiFPN node_config, a list of dictionaries corresponding to + each node in the BiFPN computation graph, where each entry should have an + associated 'name'. + + Returns: + A list of strings corresponding to the names of the output BiFPN nodes. + """ + num_output_nodes = fpn_max_level - fpn_min_level + 1 + return [node['name'] for node in node_config[-num_output_nodes:]] + + +def _create_bifpn_node_config(bifpn_num_iterations, + bifpn_num_filters, + fpn_min_level, + fpn_max_level, + input_max_level, + bifpn_node_params=None, + level_scales=None): + """Creates a config specifying a bidirectional feature pyramid network. + + Args: + bifpn_num_iterations: the number of top-down bottom-up feature computations + to repeat in the BiFPN. + bifpn_num_filters: the number of filters (channels) for every feature map + used in the BiFPN. + fpn_min_level: the minimum pyramid level (highest feature map resolution) to + use in the BiFPN. + fpn_max_level: the maximum pyramid level (lowest feature map resolution) to + use in the BiFPN. + input_max_level: the maximum pyramid level that will be provided as input to + the BiFPN. Accordingly, the BiFPN will compute additional pyramid levels + from input_max_level, up to the desired fpn_max_level. + bifpn_node_params: If not 'None', a dictionary of additional default BiFPN + node parameters that will be applied to all BiFPN nodes. + level_scales: a list of pyramid level scale factors. If 'None', each level's + scale is set to 2^level by default, which corresponds to each successive + feature map scaling by a factor of 2. + + Returns: + A list of dictionaries used to define nodes in the BiFPN computation graph, + as proposed by EfficientDet, Tan et al (https://arxiv.org/abs/1911.09070). + Each node's entry has the corresponding keys: + name: String. The name of this node in the BiFPN. The node name follows + the format '{bifpn_iteration}_{dn|up}_lvl_{pyramid_level}', where 'dn' + or 'up' refers to whether the node is in the top-down or bottom-up + portion of a single BiFPN iteration. 
+ scale: the scale factor for this node, by default 2^level. + inputs: A list of names of nodes which are inputs to this node. + num_channels: The number of channels for this node. + combine_method: String. Name of the method used to combine input + node feature maps, 'fast_attention' by default for nodes which have more + than one input. Otherwise, 'None' for nodes with only one input node. + input_op: A (partial) function which is called to construct the layers + that will be applied to this BiFPN node's inputs. This function is + called with the arguments: + input_op(name, input_scale, input_num_channels, output_scale, + output_num_channels, conv_hyperparams, is_training, + freeze_batchnorm) + post_combine_op: A (partial) function which is called to construct the + layers that will be applied to the result of the combine operation for + this BiFPN node. This function will be called with the arguments: + post_combine_op(name, conv_hyperparams, is_training, freeze_batchnorm) + If 'None', then no layers will be applied after the combine operation + for this node. + """ + if not level_scales: + level_scales = [2**i for i in range(fpn_min_level, fpn_max_level + 1)] + + default_node_params = { + 'num_channels': + bifpn_num_filters, + 'combine_method': + 'fast_attention', + 'input_op': + functools.partial( + _create_bifpn_resample_block, downsample_method='max_pooling'), + 'post_combine_op': + functools.partial( + bifpn_utils.create_conv_block, + num_filters=bifpn_num_filters, + kernel_size=3, + strides=1, + padding='SAME', + use_separable=True, + apply_batchnorm=True, + apply_activation=True, + conv_bn_act_pattern=False), + } + if bifpn_node_params: + default_node_params.update(bifpn_node_params) + + bifpn_node_params = [] + # Create additional base pyramid levels not provided as input to the BiFPN. + # Note, combine_method and post_combine_op are set to None for additional + # base pyramid levels because they do not combine multiple input BiFPN nodes. + for i in range(input_max_level + 1, fpn_max_level + 1): + node_params = dict(default_node_params) + node_params.update({ + 'name': '0_up_lvl_{}'.format(i), + 'scale': level_scales[i - fpn_min_level], + 'inputs': ['0_up_lvl_{}'.format(i - 1)], + 'combine_method': None, + 'post_combine_op': None, + }) + bifpn_node_params.append(node_params) + + for i in range(bifpn_num_iterations): + # The first bottom-up feature pyramid (which includes the input pyramid + # levels from the backbone network and the additional base pyramid levels) + # is indexed at 0. So, the first top-down bottom-up pass of the BiFPN is + # indexed from 1, and repeated for bifpn_num_iterations iterations. + bifpn_i = i + 1 + + # Create top-down nodes. + for level_i in reversed(range(fpn_min_level, fpn_max_level)): + inputs = [] + # BiFPN nodes in the top-down pass receive input from the corresponding + # level from the previous BiFPN iteration's bottom-up pass, except for the + # bottom-most (min) level node, which is computed once in the initial + # bottom-up pass, and is afterwards only computed in each top-down pass. 
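+      # For example (illustrative values): with fpn_min_level=3 and
+      # fpn_max_level=7, the first top-down pass creates '1_dn_lvl_6' down to
+      # '1_dn_lvl_3', and '1_dn_lvl_5' combines '0_up_lvl_5' from the previous
+      # bottom-up pass with the newly created '1_dn_lvl_6'.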
+ if level_i > fpn_min_level or bifpn_i == 1: + inputs.append('{}_up_lvl_{}'.format(bifpn_i - 1, level_i)) + else: + inputs.append('{}_dn_lvl_{}'.format(bifpn_i - 1, level_i)) + inputs.append(bifpn_node_params[-1]['name']) + node_params = dict(default_node_params) + node_params.update({ + 'name': '{}_dn_lvl_{}'.format(bifpn_i, level_i), + 'scale': level_scales[level_i - fpn_min_level], + 'inputs': inputs + }) + bifpn_node_params.append(node_params) + + # Create bottom-up nodes. + for level_i in range(fpn_min_level + 1, fpn_max_level + 1): + # BiFPN nodes in the bottom-up pass receive input from the corresponding + # level from the preceding top-down pass, except for the top (max) level + # which does not have a corresponding node in the top-down pass. + inputs = ['{}_up_lvl_{}'.format(bifpn_i - 1, level_i)] + if level_i < fpn_max_level: + inputs.append('{}_dn_lvl_{}'.format(bifpn_i, level_i)) + inputs.append(bifpn_node_params[-1]['name']) + node_params = dict(default_node_params) + node_params.update({ + 'name': '{}_up_lvl_{}'.format(bifpn_i, level_i), + 'scale': level_scales[level_i - fpn_min_level], + 'inputs': inputs + }) + bifpn_node_params.append(node_params) + + return bifpn_node_params + + +def _create_bifpn_resample_block(name, + input_scale, + input_num_channels, + output_scale, + output_num_channels, + conv_hyperparams, + is_training, + freeze_batchnorm, + downsample_method=None, + use_native_resize_op=False, + maybe_apply_1x1_conv=True, + apply_1x1_pre_sampling=True, + apply_1x1_post_sampling=False): + """Creates resample block layers for input feature maps to BiFPN nodes. + + Args: + name: String. Name used for this block of layers. + input_scale: Scale factor of the input feature map. + input_num_channels: Number of channels in the input feature map. + output_scale: Scale factor of the output feature map. + output_num_channels: Number of channels in the output feature map. + conv_hyperparams: A `hyperparams_builder.KerasLayerHyperparams` object + containing hyperparameters for convolution ops. + is_training: Indicates whether the feature generator is in training mode. + freeze_batchnorm: Bool. Whether to freeze batch norm parameters during + training or not. When training with a small batch size (e.g. 1), it is + desirable to freeze batch norm update and use pretrained batch norm + params. + downsample_method: String. Method to use when downsampling feature maps. + use_native_resize_op: Bool. Whether to use the native resize up when + upsampling feature maps. + maybe_apply_1x1_conv: Bool. If 'True', a 1x1 convolution will only be + applied if the input_num_channels differs from the output_num_channels. + apply_1x1_pre_sampling: Bool. Whether a 1x1 convolution will be applied to + the input feature map before the up/down-sampling operation. + apply_1x1_post_sampling: Bool. Whether a 1x1 convolution will be applied to + the input feature map after the up/down-sampling operation. + + Returns: + A list of layers which may be applied to the input feature maps in order to + compute feature maps with the specified scale and number of channels. + """ + # By default, 1x1 convolutions are only applied before sampling when the + # number of input and output channels differ. 
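+  # For example (illustrative values): with input_num_channels=256 and
+  # output_num_channels=128, the default pre-sampling 1x1 convolution below is
+  # kept so the input is projected to 128 channels before resampling; when the
+  # two channel counts already match, both 1x1 convolutions are skipped.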
+ if maybe_apply_1x1_conv and output_num_channels == input_num_channels: + apply_1x1_pre_sampling = False + apply_1x1_post_sampling = False + + apply_bn_for_resampling = True + layers = [] + if apply_1x1_pre_sampling: + layers.extend( + bifpn_utils.create_conv_block( + name=name + '1x1_pre_sample/', + num_filters=output_num_channels, + kernel_size=1, + strides=1, + padding='SAME', + use_separable=False, + apply_batchnorm=apply_bn_for_resampling, + apply_activation=False, + conv_hyperparams=conv_hyperparams, + is_training=is_training, + freeze_batchnorm=freeze_batchnorm)) + + layers.extend( + bifpn_utils.create_resample_feature_map_ops(input_scale, output_scale, + downsample_method, + use_native_resize_op, + conv_hyperparams, is_training, + freeze_batchnorm, name)) + + if apply_1x1_post_sampling: + layers.extend( + bifpn_utils.create_conv_block( + name=name + '1x1_post_sample/', + num_filters=output_num_channels, + kernel_size=1, + strides=1, + padding='SAME', + use_separable=False, + apply_batchnorm=apply_bn_for_resampling, + apply_activation=False, + conv_hyperparams=conv_hyperparams, + is_training=is_training, + freeze_batchnorm=freeze_batchnorm)) + + return layers + + +def _create_bifpn_combine_op(num_inputs, name, combine_method): + """Creates a layer or function that combines a BiFPN node's input feature maps. + + Args: + num_inputs: The number of inputs to this combine operation. + name: String. The name of this combine operation. + combine_method: String. The method used to combine input feature maps. + + Returns: + A function which may be called with a list of num_inputs feature maps + and which will return a single feature map. + """ + + combine_op = None + if num_inputs < 1: + raise ValueError('Expected at least 1 input for BiFPN combine.') + elif num_inputs == 1: + combine_op = lambda x: x[0] + else: + combine_op = bifpn_utils.BiFPNCombineLayer( + combine_method=combine_method, name=name) + return combine_op + + +class KerasBiFpnFeatureMaps(tf.keras.Model): + """Generates Keras based BiFPN feature maps from an input feature map pyramid. + + A Keras model that generates multi-scale feature maps for detection by + iteratively computing top-down and bottom-up feature pyramids, as in the + EfficientDet paper by Tan et al, see arxiv.org/abs/1911.09070 for details. + """ + + def __init__(self, + bifpn_num_iterations, + bifpn_num_filters, + fpn_min_level, + fpn_max_level, + input_max_level, + is_training, + conv_hyperparams, + freeze_batchnorm, + bifpn_node_params=None, + name=None): + """Constructor. + + Args: + bifpn_num_iterations: The number of top-down bottom-up iterations. + bifpn_num_filters: The number of filters (channels) to be used for all + feature maps in this BiFPN. + fpn_min_level: The minimum pyramid level (highest feature map resolution) + to use in the BiFPN. + fpn_max_level: The maximum pyramid level (lowest feature map resolution) + to use in the BiFPN. + input_max_level: The maximum pyramid level that will be provided as input + to the BiFPN. Accordingly, the BiFPN will compute any additional pyramid + levels from input_max_level up to the desired fpn_max_level, with each + successive level downsampling by a scale factor of 2 by default. + is_training: Indicates whether the feature generator is in training mode. + conv_hyperparams: A `hyperparams_builder.KerasLayerHyperparams` object + containing hyperparameters for convolution ops. + freeze_batchnorm: Bool. Whether to freeze batch norm parameters during + training or not. When training with a small batch size (e.g.
1), it is + desirable to freeze batch norm update and use pretrained batch norm + params. + bifpn_node_params: An optional dictionary that may be used to specify + default parameters for BiFPN nodes, without the need to provide a custom + bifpn_node_config. For example, if '{ combine_method: 'sum' }', then all + BiFPN nodes will combine input feature maps by summation, rather than + by the default fast attention method. + name: A string name scope to assign to the model. If 'None', Keras + will auto-generate one from the class name. + """ + super(KerasBiFpnFeatureMaps, self).__init__(name=name) + bifpn_node_config = _create_bifpn_node_config( + bifpn_num_iterations, bifpn_num_filters, fpn_min_level, fpn_max_level, + input_max_level, bifpn_node_params) + bifpn_input_config = _create_bifpn_input_config( + fpn_min_level, fpn_max_level, input_max_level) + bifpn_output_node_names = _get_bifpn_output_node_names( + fpn_min_level, fpn_max_level, bifpn_node_config) + + self.bifpn_node_config = bifpn_node_config + self.bifpn_output_node_names = bifpn_output_node_names + self.node_input_blocks = [] + self.node_combine_op = [] + self.node_post_combine_block = [] + + all_node_params = bifpn_input_config + all_node_names = [node['name'] for node in all_node_params] + for node_config in bifpn_node_config: + # Maybe transform and/or resample input feature maps. + input_blocks = [] + for input_name in node_config['inputs']: + if input_name not in all_node_names: + raise ValueError( + 'Input feature map ({}) does not exist:'.format(input_name)) + input_index = all_node_names.index(input_name) + input_params = all_node_params[input_index] + input_block = node_config['input_op']( + name='{}/input_{}/'.format(node_config['name'], input_name), + input_scale=input_params['scale'], + input_num_channels=input_params.get('num_channels', None), + output_scale=node_config['scale'], + output_num_channels=node_config['num_channels'], + conv_hyperparams=conv_hyperparams, + is_training=is_training, + freeze_batchnorm=freeze_batchnorm) + input_blocks.append((input_index, input_block)) + + # Combine input feature maps. + combine_op = _create_bifpn_combine_op( + num_inputs=len(input_blocks), + name=(node_config['name'] + '/combine'), + combine_method=node_config['combine_method']) + + # Post-combine layers. + post_combine_block = [] + if node_config['post_combine_op']: + post_combine_block.extend(node_config['post_combine_op']( + name=node_config['name'] + '/post_combine/', + conv_hyperparams=conv_hyperparams, + is_training=is_training, + freeze_batchnorm=freeze_batchnorm)) + + self.node_input_blocks.append(input_blocks) + self.node_combine_op.append(combine_op) + self.node_post_combine_block.append(post_combine_block) + all_node_params.append(node_config) + all_node_names.append(node_config['name']) + + def call(self, feature_pyramid): + """Compute BiFPN feature maps from input feature pyramid. + + Executed when calling the `.__call__` method on input. + + Args: + feature_pyramid: list of tuples of (tensor_name, image_feature_tensor). + + Returns: + feature_maps: an OrderedDict mapping keys (feature map names) to + tensors where each tensor has shape [batch, height_i, width_i, depth_i]. + """ + feature_maps = [el[1] for el in feature_pyramid] + output_feature_maps = [None for node in self.bifpn_output_node_names] + + for index, node in enumerate(self.bifpn_node_config): + node_scope = 'node_{:02d}'.format(index) + with tf.name_scope(node_scope): + # Apply layer blocks to this node's input feature maps. 
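+        # Note: feature_maps grows as each node is computed, so input_index
+        # below may refer either to a backbone feature map or to an earlier
+        # BiFPN node's output.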
+ input_block_results = [] + for input_index, input_block in self.node_input_blocks[index]: + block_result = feature_maps[input_index] + for layer in input_block: + block_result = layer(block_result) + input_block_results.append(block_result) + + # Combine the resulting feature maps. + node_result = self.node_combine_op[index](input_block_results) + + # Apply post-combine layer block if applicable. + for layer in self.node_post_combine_block[index]: + node_result = layer(node_result) + + feature_maps.append(node_result) + + if node['name'] in self.bifpn_output_node_names: + index = self.bifpn_output_node_names.index(node['name']) + output_feature_maps[index] = node_result + + return collections.OrderedDict( + zip(self.bifpn_output_node_names, output_feature_maps)) diff --git a/research/object_detection/models/bidirectional_feature_pyramid_generators_tf2_test.py b/research/object_detection/models/bidirectional_feature_pyramid_generators_tf2_test.py new file mode 100644 index 000000000..cbc815cc4 --- /dev/null +++ b/research/object_detection/models/bidirectional_feature_pyramid_generators_tf2_test.py @@ -0,0 +1,167 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +"""Tests for bidirectional feature pyramid generators.""" +import unittest +from absl.testing import parameterized + +import tensorflow.compat.v1 as tf + +from google.protobuf import text_format + +from object_detection.builders import hyperparams_builder +from object_detection.models import bidirectional_feature_pyramid_generators as bifpn_generators +from object_detection.protos import hyperparams_pb2 +from object_detection.utils import test_case +from object_detection.utils import test_utils +from object_detection.utils import tf_version + + +@parameterized.parameters({'bifpn_num_iterations': 2}, + {'bifpn_num_iterations': 8}) +@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.') +class BiFPNFeaturePyramidGeneratorTest(test_case.TestCase): + + def _build_conv_hyperparams(self): + conv_hyperparams = hyperparams_pb2.Hyperparams() + conv_hyperparams_text_proto = """ + regularizer { + l2_regularizer { + } + } + initializer { + truncated_normal_initializer { + } + } + force_use_bias: true + """ + text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams) + return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams) + + def test_get_expected_feature_map_shapes(self, bifpn_num_iterations): + with test_utils.GraphContextOrNone() as g: + image_features = [ + ('block3', tf.random_uniform([4, 16, 16, 256], dtype=tf.float32)), + ('block4', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)), + ('block5', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)) + ] + bifpn_generator = bifpn_generators.KerasBiFpnFeatureMaps( + bifpn_num_iterations=bifpn_num_iterations, + bifpn_num_filters=128, + fpn_min_level=3, + fpn_max_level=7, + input_max_level=5, + is_training=True, + 
conv_hyperparams=self._build_conv_hyperparams(), + freeze_batchnorm=False) + def graph_fn(): + feature_maps = bifpn_generator(image_features) + return feature_maps + + expected_feature_map_shapes = { + '{}_dn_lvl_3'.format(bifpn_num_iterations): (4, 16, 16, 128), + '{}_up_lvl_4'.format(bifpn_num_iterations): (4, 8, 8, 128), + '{}_up_lvl_5'.format(bifpn_num_iterations): (4, 4, 4, 128), + '{}_up_lvl_6'.format(bifpn_num_iterations): (4, 2, 2, 128), + '{}_up_lvl_7'.format(bifpn_num_iterations): (4, 1, 1, 128)} + out_feature_maps = self.execute(graph_fn, [], g) + out_feature_map_shapes = dict( + (key, value.shape) for key, value in out_feature_maps.items()) + self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes) + + def test_get_expected_variable_names(self, bifpn_num_iterations): + with test_utils.GraphContextOrNone() as g: + image_features = [ + ('block3', tf.random_uniform([4, 16, 16, 256], dtype=tf.float32)), + ('block4', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)), + ('block5', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)) + ] + bifpn_generator = bifpn_generators.KerasBiFpnFeatureMaps( + bifpn_num_iterations=bifpn_num_iterations, + bifpn_num_filters=128, + fpn_min_level=3, + fpn_max_level=7, + input_max_level=5, + is_training=True, + conv_hyperparams=self._build_conv_hyperparams(), + freeze_batchnorm=False, + name='bifpn') + def graph_fn(): + return bifpn_generator(image_features) + + self.execute(graph_fn, [], g) + expected_variables = [ + 'bifpn/node_00/0_up_lvl_6/input_0_up_lvl_5/1x1_pre_sample/conv/bias', + 'bifpn/node_00/0_up_lvl_6/input_0_up_lvl_5/1x1_pre_sample/conv/kernel', + 'bifpn/node_03/1_dn_lvl_5/input_0_up_lvl_5/1x1_pre_sample/conv/bias', + 'bifpn/node_03/1_dn_lvl_5/input_0_up_lvl_5/1x1_pre_sample/conv/kernel', + 'bifpn/node_04/1_dn_lvl_4/input_0_up_lvl_4/1x1_pre_sample/conv/bias', + 'bifpn/node_04/1_dn_lvl_4/input_0_up_lvl_4/1x1_pre_sample/conv/kernel', + 'bifpn/node_05/1_dn_lvl_3/input_0_up_lvl_3/1x1_pre_sample/conv/bias', + 'bifpn/node_05/1_dn_lvl_3/input_0_up_lvl_3/1x1_pre_sample/conv/kernel', + 'bifpn/node_06/1_up_lvl_4/input_0_up_lvl_4/1x1_pre_sample/conv/bias', + 'bifpn/node_06/1_up_lvl_4/input_0_up_lvl_4/1x1_pre_sample/conv/kernel', + 'bifpn/node_07/1_up_lvl_5/input_0_up_lvl_5/1x1_pre_sample/conv/bias', + 'bifpn/node_07/1_up_lvl_5/input_0_up_lvl_5/1x1_pre_sample/conv/kernel'] + expected_node_variable_patterns = [ + ['bifpn/node_{:02}/{}_dn_lvl_6/combine/bifpn_combine_weights', + 'bifpn/node_{:02}/{}_dn_lvl_6/post_combine/separable_conv/bias', + 'bifpn/node_{:02}/{}_dn_lvl_6/post_combine/separable_conv/depthwise_kernel', + 'bifpn/node_{:02}/{}_dn_lvl_6/post_combine/separable_conv/pointwise_kernel'], + ['bifpn/node_{:02}/{}_dn_lvl_5/combine/bifpn_combine_weights', + 'bifpn/node_{:02}/{}_dn_lvl_5/post_combine/separable_conv/bias', + 'bifpn/node_{:02}/{}_dn_lvl_5/post_combine/separable_conv/depthwise_kernel', + 'bifpn/node_{:02}/{}_dn_lvl_5/post_combine/separable_conv/pointwise_kernel'], + ['bifpn/node_{:02}/{}_dn_lvl_4/combine/bifpn_combine_weights', + 'bifpn/node_{:02}/{}_dn_lvl_4/post_combine/separable_conv/bias', + 'bifpn/node_{:02}/{}_dn_lvl_4/post_combine/separable_conv/depthwise_kernel', + 'bifpn/node_{:02}/{}_dn_lvl_4/post_combine/separable_conv/pointwise_kernel'], + ['bifpn/node_{:02}/{}_dn_lvl_3/combine/bifpn_combine_weights', + 'bifpn/node_{:02}/{}_dn_lvl_3/post_combine/separable_conv/bias', + 'bifpn/node_{:02}/{}_dn_lvl_3/post_combine/separable_conv/depthwise_kernel', + 
'bifpn/node_{:02}/{}_dn_lvl_3/post_combine/separable_conv/pointwise_kernel'], + ['bifpn/node_{:02}/{}_up_lvl_4/combine/bifpn_combine_weights', + 'bifpn/node_{:02}/{}_up_lvl_4/post_combine/separable_conv/bias', + 'bifpn/node_{:02}/{}_up_lvl_4/post_combine/separable_conv/depthwise_kernel', + 'bifpn/node_{:02}/{}_up_lvl_4/post_combine/separable_conv/pointwise_kernel'], + ['bifpn/node_{:02}/{}_up_lvl_5/combine/bifpn_combine_weights', + 'bifpn/node_{:02}/{}_up_lvl_5/post_combine/separable_conv/bias', + 'bifpn/node_{:02}/{}_up_lvl_5/post_combine/separable_conv/depthwise_kernel', + 'bifpn/node_{:02}/{}_up_lvl_5/post_combine/separable_conv/pointwise_kernel'], + ['bifpn/node_{:02}/{}_up_lvl_6/combine/bifpn_combine_weights', + 'bifpn/node_{:02}/{}_up_lvl_6/post_combine/separable_conv/bias', + 'bifpn/node_{:02}/{}_up_lvl_6/post_combine/separable_conv/depthwise_kernel', + 'bifpn/node_{:02}/{}_up_lvl_6/post_combine/separable_conv/pointwise_kernel'], + ['bifpn/node_{:02}/{}_up_lvl_7/combine/bifpn_combine_weights', + 'bifpn/node_{:02}/{}_up_lvl_7/post_combine/separable_conv/bias', + 'bifpn/node_{:02}/{}_up_lvl_7/post_combine/separable_conv/depthwise_kernel', + 'bifpn/node_{:02}/{}_up_lvl_7/post_combine/separable_conv/pointwise_kernel']] + + node_i = 2 + for iter_i in range(1, bifpn_num_iterations+1): + for node_variable_patterns in expected_node_variable_patterns: + for pattern in node_variable_patterns: + expected_variables.append(pattern.format(node_i, iter_i)) + node_i += 1 + + expected_variables = set(expected_variables) + actual_variable_set = set( + [var.name.split(':')[0] for var in bifpn_generator.variables]) + self.assertSetEqual(expected_variables, actual_variable_set) + +# TODO(aom): Tests for create_bifpn_combine_op. + +if __name__ == '__main__': + tf.test.main() diff --git a/research/object_detection/models/keras_models/hourglass_network.py b/research/object_detection/models/keras_models/hourglass_network.py index d216b1669..09fb8ed4f 100644 --- a/research/object_detection/models/keras_models/hourglass_network.py +++ b/research/object_detection/models/keras_models/hourglass_network.py @@ -43,6 +43,15 @@ def _get_padding_for_kernel_size(kernel_size): kernel_size)) +def batchnorm(): + try: + return tf.keras.layers.experimental.SyncBatchNormalization( + name='batchnorm', epsilon=1e-5, momentum=0.1) + except AttributeError: + return tf.keras.layers.BatchNormalization( + name='batchnorm', epsilon=1e-5, momentum=0.1, fused=BATCH_NORM_FUSED) + + class ConvolutionalBlock(tf.keras.layers.Layer): """Block that aggregates Convolution + Norm layer + ReLU.""" @@ -73,8 +82,7 @@ class ConvolutionalBlock(tf.keras.layers.Layer): filters=out_channels, kernel_size=kernel_size, use_bias=False, strides=stride, padding=padding) - self.norm = tf.keras.layers.experimental.SyncBatchNormalization( - name='batchnorm', epsilon=1e-5, momentum=0.1) + self.norm = batchnorm() if relu: self.relu = tf.keras.layers.ReLU() @@ -124,8 +132,7 @@ class ResidualBlock(tf.keras.layers.Layer): self.conv = tf.keras.layers.Conv2D( filters=out_channels, kernel_size=kernel_size, use_bias=False, strides=1, padding=padding) - self.norm = tf.keras.layers.experimental.SyncBatchNormalization( - name='batchnorm', epsilon=1e-5, momentum=0.1) + self.norm = batchnorm() if skip_conv: self.skip = SkipConvolution(out_channels=out_channels, diff --git a/research/object_detection/utils/model_util.py b/research/object_detection/utils/model_util.py index 6a46265c3..bc5cfe482 100644 --- a/research/object_detection/utils/model_util.py +++ 
b/research/object_detection/utils/model_util.py @@ -54,8 +54,8 @@ def extract_submodel(model, inputs, outputs, name=None): for layer in model.layers: layer_output = layer.output layer_inputs = layer.input - output_to_layer[layer_output.ref()] = layer - output_to_layer_input[layer_output.ref()] = layer_inputs + output_to_layer[layer_output.experimental_ref()] = layer + output_to_layer_input[layer_output.experimental_ref()] = layer_inputs model_inputs_dict = {} memoized_results = {} @@ -63,21 +63,22 @@ def extract_submodel(model, inputs, outputs, name=None): # Relies on recursion, very low limit in python def _recurse_in_model(tensor): """Walk the existing model recursively to copy a submodel.""" - if tensor.ref() in memoized_results: - return memoized_results[tensor.ref()] - if (tensor.ref() == inputs.ref()) or ( + if tensor.experimental_ref() in memoized_results: + return memoized_results[tensor.experimental_ref()] + if (tensor.experimental_ref() == inputs.experimental_ref()) or ( isinstance(inputs, list) and tensor in inputs): - if tensor.ref() not in model_inputs_dict: - model_inputs_dict[tensor.ref()] = tf.keras.layers.Input(tensor=tensor) - out = model_inputs_dict[tensor.ref()] + if tensor.experimental_ref() not in model_inputs_dict: + model_inputs_dict[tensor.experimental_ref()] = tf.keras.layers.Input( + tensor=tensor) + out = model_inputs_dict[tensor.experimental_ref()] else: - cur_inputs = output_to_layer_input[tensor.ref()] - cur_layer = output_to_layer[tensor.ref()] + cur_inputs = output_to_layer_input[tensor.experimental_ref()] + cur_layer = output_to_layer[tensor.experimental_ref()] if isinstance(cur_inputs, list): out = cur_layer([_recurse_in_model(inp) for inp in cur_inputs]) else: out = cur_layer(_recurse_in_model(cur_inputs)) - memoized_results[tensor.ref()] = out + memoized_results[tensor.experimental_ref()] = out return out if isinstance(outputs, list): @@ -86,8 +87,10 @@ def extract_submodel(model, inputs, outputs, name=None): model_outputs = _recurse_in_model(outputs) if isinstance(inputs, list): - model_inputs = [model_inputs_dict[tensor.ref()] for tensor in inputs] + model_inputs = [ + model_inputs_dict[tensor.experimental_ref()] for tensor in inputs + ] else: - model_inputs = model_inputs_dict[inputs.ref()] + model_inputs = model_inputs_dict[inputs.experimental_ref()] return tf.keras.Model(inputs=model_inputs, outputs=model_outputs, name=name) -- GitLab From 6ef140dcd0348bd6cea5d59a9ed807af0f3e9040 Mon Sep 17 00:00:00 2001 From: vivek rathod Date: Fri, 26 Jun 2020 15:01:18 -0700 Subject: [PATCH 67/79] Merged commit includes the following changes: (#8741) 318545448 by jonathanhuang: Modifies visualization code in TF2 evaluation loop so that we don't write out image summaries to disk for every single image. This change will reduce summary file sizes by ~2 orders of magnitude on average and speed up evaluation cycles (20 minutes per COCO eval cycle vs 2 hours for RetinaNet). -- 318514741 by sbeery: Adding link to the blog post -- PiperOrigin-RevId: 318545448 --- research/object_detection/README.md | 2 ++ research/object_detection/model_lib_v2.py | 31 ++++++++++------------- 2 files changed, 16 insertions(+), 17 deletions(-) diff --git a/research/object_detection/README.md b/research/object_detection/README.md index 4821f3e66..c88e88c47 100644 --- a/research/object_detection/README.md +++ b/research/object_detection/README.md @@ -118,6 +118,8 @@ Importantly, these contextual images need not be labeled. 
novel camera deployment to improve performance at that camera, boosting model generalizeability. +Read about Context R-CNN on the Google AI blog [here](https://ai.googleblog.com/2020/06/leveraging-temporal-context-for-object.html). + We have provided code for generating data with associated context [here](g3doc/context_rcnn.md), and a sample config for a Context R-CNN model [here](samples/configs/context_rcnn_resnet101_snapshot_serengeti_sync.config). diff --git a/research/object_detection/model_lib_v2.py b/research/object_detection/model_lib_v2.py index 2f360a873..0a66bfabc 100644 --- a/research/object_detection/model_lib_v2.py +++ b/research/object_detection/model_lib_v2.py @@ -736,28 +736,25 @@ def eager_eval_loop( return eval_dict, losses_dict, class_agnostic + agnostic_categories = label_map_util.create_class_agnostic_category_index() + per_class_categories = label_map_util.create_category_index_from_labelmap( + eval_input_config.label_map_path) + keypoint_edges = [ + (kp.start, kp.end) for kp in eval_config.keypoint_edge] + for i, (features, labels) in enumerate(eval_dataset): eval_dict, losses_dict, class_agnostic = compute_eval_dict(features, labels) + if class_agnostic: + category_index = agnostic_categories + else: + category_index = per_class_categories + if i % 100 == 0: tf.logging.info('Finished eval step %d', i) use_original_images = fields.InputDataFields.original_image in features - if not use_tpu and use_original_images: - # Summary for input images. - tf.compat.v2.summary.image( - name='eval_input_images', - step=global_step, - data=eval_dict['original_image'], - max_outputs=1) - # Summary for prediction/groundtruth side-by-side images. - if class_agnostic: - category_index = label_map_util.create_class_agnostic_category_index() - else: - category_index = label_map_util.create_category_index_from_labelmap( - eval_input_config.label_map_path) - keypoint_edges = [ - (kp.start, kp.end) for kp in eval_config.keypoint_edge] + if use_original_images and i < eval_config.num_visualizations: sbys_image_list = vutils.draw_side_by_side_evaluation_image( eval_dict, category_index=category_index, @@ -767,10 +764,10 @@ def eager_eval_loop( keypoint_edges=keypoint_edges or None) sbys_images = tf.concat(sbys_image_list, axis=0) tf.compat.v2.summary.image( - name='eval_side_by_side', + name='eval_side_by_side_' + str(i), step=global_step, data=sbys_images, - max_outputs=eval_config.num_visualizations) + max_outputs=1) if evaluators is None: if class_agnostic: -- GitLab From 68d983b9bc91d9db9a2b7cbe1b1a69d44921e210 Mon Sep 17 00:00:00 2001 From: vivek rathod Date: Fri, 26 Jun 2020 17:29:23 -0700 Subject: [PATCH 68/79] Merged commit includes the following changes: (#8742) 318569851 by jonathanhuang: Fix for fine-tuning from classification checkpoints in the v2 binary. -- PiperOrigin-RevId: 318569851 --- .../meta_architectures/faster_rcnn_meta_arch.py | 5 ++++- .../object_detection/meta_architectures/ssd_meta_arch.py | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/research/object_detection/meta_architectures/faster_rcnn_meta_arch.py b/research/object_detection/meta_architectures/faster_rcnn_meta_arch.py index 8672f9817..5c89cf8f2 100644 --- a/research/object_detection/meta_architectures/faster_rcnn_meta_arch.py +++ b/research/object_detection/meta_architectures/faster_rcnn_meta_arch.py @@ -2801,7 +2801,10 @@ class FasterRCNNMetaArch(model.DetectionModel): A dict mapping keys to Trackable objects (tf.Module or Checkpoint). 
""" if fine_tune_checkpoint_type == 'classification': - return {'feature_extractor': self.classification_backbone} + return { + 'feature_extractor': + self._feature_extractor.classification_backbone + } elif fine_tune_checkpoint_type == 'detection': fake_model = tf.train.Checkpoint( _feature_extractor_for_box_classifier_features= diff --git a/research/object_detection/meta_architectures/ssd_meta_arch.py b/research/object_detection/meta_architectures/ssd_meta_arch.py index 6105aa7f1..d5db202a8 100644 --- a/research/object_detection/meta_architectures/ssd_meta_arch.py +++ b/research/object_detection/meta_architectures/ssd_meta_arch.py @@ -1320,7 +1320,10 @@ class SSDMetaArch(model.DetectionModel): A dict mapping keys to Trackable objects (tf.Module or Checkpoint). """ if fine_tune_checkpoint_type == 'classification': - return {'feature_extractor': self.classification_backbone} + return { + 'feature_extractor': + self._feature_extractor.classification_backbone + } elif fine_tune_checkpoint_type == 'detection': fake_model = tf.train.Checkpoint( _feature_extractor=self._feature_extractor) -- GitLab From 997eaa19c914031c153cbb8690360ec3330a8de6 Mon Sep 17 00:00:00 2001 From: Hongkun Yu Date: Sun, 28 Jun 2020 11:33:15 -0700 Subject: [PATCH 69/79] Internal change PiperOrigin-RevId: 318714418 --- official/nlp/transformer/optimizer.py | 71 -------------------- official/nlp/transformer/transformer_main.py | 11 +-- official/nlp/transformer/translate.py | 2 +- 3 files changed, 4 insertions(+), 80 deletions(-) diff --git a/official/nlp/transformer/optimizer.py b/official/nlp/transformer/optimizer.py index 176b5eb8c..fd5b92294 100644 --- a/official/nlp/transformer/optimizer.py +++ b/official/nlp/transformer/optimizer.py @@ -18,9 +18,7 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function -import numpy as np import tensorflow as tf -K = tf.keras.backend class LearningRateSchedule(tf.keras.optimizers.schedules.LearningRateSchedule): @@ -66,72 +64,3 @@ class LearningRateSchedule(tf.keras.optimizers.schedules.LearningRateSchedule): 'hidden_size': self.hidden_size, 'warmup_steps': self.warmup_steps, } - - -class LearningRateFn(object): - """Creates learning rate function.""" - - def __init__(self, learning_rate, hidden_size, warmup_steps): - self.learning_rate = learning_rate - self.hidden_size = hidden_size - self.warmup_steps = float(warmup_steps) - - def __call__(self, global_step): - """Calculate learning rate with linear warmup and rsqrt decay.""" - step = float(global_step) - learning_rate = self.learning_rate - learning_rate *= (self.hidden_size ** -0.5) - # Apply linear warmup - learning_rate *= np.minimum(1.0, step / self.warmup_steps) - # Apply rsqrt decay - learning_rate /= np.sqrt(np.maximum(step, self.warmup_steps)) - return learning_rate - - -class LearningRateScheduler(tf.keras.callbacks.Callback): - """Keras callback to schedule learning rate. - - TODO(tianlin): Refactor this scheduler and LearningRateBatchScheduler in - official/resnet/keras/keras_common.py. - """ - - def __init__(self, schedule, init_steps=None, verbose=False): - super(LearningRateScheduler, self).__init__() - self.schedule = schedule - self.verbose = verbose - if init_steps is None: - init_steps = 0.0 - self.steps = float(init_steps) # Total steps during training. 
- - def on_epoch_begin(self, epoch, logs=None): - if not hasattr(self.model.optimizer, 'lr'): - raise ValueError('Optimizer must have a "lr" attribute.') - if not hasattr(self.model.optimizer, 'iterations'): - raise ValueError('Optimizer must have a "iterations" attribute.') - - def on_train_batch_begin(self, batch, logs=None): - """Adjusts learning rate for each train batch.""" - if self.verbose > 0: - iterations = K.get_value(self.model.optimizer.iterations) - print('Original iteration %d' % iterations) - - self.steps += 1.0 - try: # new API - lr = float(K.get_value(self.model.optimizer.lr)) - lr = self.schedule(self.steps, lr) - except TypeError: # Support for old API for backward compatibility - lr = self.schedule(self.steps) - if not isinstance(lr, (float, np.float32, np.float64)): - raise ValueError('The output of the "schedule" function ' - 'should be float.') - K.set_value(self.model.optimizer.lr, lr) - K.set_value(self.model.optimizer.iterations, self.steps) - - if self.verbose > 0: - print('Batch %05d Step %05d: LearningRateScheduler setting learning ' - 'rate to %s.' % (batch + 1, self.steps, lr)) - - def on_epoch_end(self, epoch, logs=None): - logs = logs or {} - logs['lr'] = K.get_value(self.model.optimizer.lr) - logs['steps'] = self.steps diff --git a/official/nlp/transformer/transformer_main.py b/official/nlp/transformer/transformer_main.py index 14177d856..eeeb3288d 100644 --- a/official/nlp/transformer/transformer_main.py +++ b/official/nlp/transformer/transformer_main.py @@ -241,7 +241,7 @@ class TransformerTask(object): if params["use_ctl"]: train_ds_iterator = iter(train_ds) - callbacks = self._create_callbacks(flags_obj.model_dir, 0, params) + callbacks = self._create_callbacks(flags_obj.model_dir, params) # Only TimeHistory callback is supported for CTL if params["use_ctl"]: @@ -408,14 +408,9 @@ class TransformerTask(object): for i in range(length): translate.translate_from_input(val_outputs[i], subtokenizer) - def _create_callbacks(self, cur_log_dir, init_steps, params): + def _create_callbacks(self, cur_log_dir, params): """Creates a list of callbacks.""" - sfunc = optimizer.LearningRateFn(params["learning_rate"], - params["hidden_size"], - params["learning_rate_warmup_steps"]) - scheduler_callback = optimizer.LearningRateScheduler(sfunc, init_steps) callbacks = misc.get_callbacks() - callbacks.append(scheduler_callback) if params["enable_checkpointing"]: ckpt_full_path = os.path.join(cur_log_dir, "cp-{epoch:04d}.ckpt") callbacks.append( @@ -445,7 +440,7 @@ class TransformerTask(object): params["learning_rate"], params["hidden_size"], params["learning_rate_warmup_steps"]) opt = tf.keras.optimizers.Adam( - lr_schedule if self.use_tpu else params["learning_rate"], + lr_schedule, params["optimizer_adam_beta1"], params["optimizer_adam_beta2"], epsilon=params["optimizer_adam_epsilon"]) diff --git a/official/nlp/transformer/translate.py b/official/nlp/transformer/translate.py index 1f9250414..a6e79a9cf 100644 --- a/official/nlp/transformer/translate.py +++ b/official/nlp/transformer/translate.py @@ -181,7 +181,7 @@ def translate_file(model, raise ValueError("File output is a directory, will not save outputs to " "file.") logging.info("Writing to file %s", output_file) - with tf.compat.v1.gfile.Open(output_file, "w") as f: + with tf.io.gfile.GFile(output_file, "w") as f: for i in sorted_keys: f.write("%s\n" % translations[i]) -- GitLab From db39ef826193d0802f644ba30397242a7272676e Mon Sep 17 00:00:00 2001 From: "A. 
Unique TensorFlower" Date: Sun, 28 Jun 2020 22:56:20 -0700 Subject: [PATCH 70/79] Internal change PiperOrigin-RevId: 318755856 --- official/nlp/configs/bert.py | 7 + official/nlp/tasks/question_answering.py | 142 +++++++++++++++++- official/nlp/tasks/question_answering_test.py | 84 ++++++++--- 3 files changed, 202 insertions(+), 31 deletions(-) diff --git a/official/nlp/configs/bert.py b/official/nlp/configs/bert.py index 48b83107f..1db3e1585 100644 --- a/official/nlp/configs/bert.py +++ b/official/nlp/configs/bert.py @@ -126,10 +126,17 @@ class QADataConfig(cfg.DataConfig): class QADevDataConfig(cfg.DataConfig): """Dev Data config for queston answering (tasks/question_answering).""" input_path: str = "" + input_preprocessed_data_path: str = "" + version_2_with_negative: bool = False + doc_stride: int = 128 global_batch_size: int = 48 is_training: bool = False seq_length: int = 384 + query_length: int = 64 drop_remainder: bool = False + vocab_file: str = "" + tokenization: str = "WordPiece" # WordPiece or SentencePiece + do_lower_case: bool = True @dataclasses.dataclass diff --git a/official/nlp/tasks/question_answering.py b/official/nlp/tasks/question_answering.py index aa066cc02..a9ab9dbfd 100644 --- a/official/nlp/tasks/question_answering.py +++ b/official/nlp/tasks/question_answering.py @@ -14,7 +14,10 @@ # limitations under the License. # ============================================================================== """Question answering task.""" -import logging +import collections +import json +import os +from absl import logging import dataclasses import tensorflow as tf import tensorflow_hub as hub @@ -22,7 +25,12 @@ import tensorflow_hub as hub from official.core import base_task from official.modeling.hyperparams import config_definitions as cfg from official.nlp.bert import input_pipeline +from official.nlp.bert import squad_evaluate_v1_1 +from official.nlp.bert import squad_evaluate_v2_0 +from official.nlp.bert import tokenization from official.nlp.configs import encoders +from official.nlp.data import squad_lib as squad_lib_wp +from official.nlp.data import squad_lib_sp from official.nlp.modeling import models from official.nlp.tasks import utils @@ -33,6 +41,9 @@ class QuestionAnsweringConfig(cfg.TaskConfig): # At most one of `init_checkpoint` and `hub_module_url` can be specified. init_checkpoint: str = '' hub_module_url: str = '' + n_best_size: int = 20 + max_answer_length: int = 30 + null_score_diff_threshold: float = 0.0 model: encoders.TransformerEncoderConfig = ( encoders.TransformerEncoderConfig()) train_data: cfg.DataConfig = cfg.DataConfig() @@ -41,10 +52,7 @@ class QuestionAnsweringConfig(cfg.TaskConfig): @base_task.register_task_cls(QuestionAnsweringConfig) class QuestionAnsweringTask(base_task.Task): - """Task object for question answering. - - TODO(lehou): Add post-processing. 
- """ + """Task object for question answering.""" def __init__(self, params=cfg.TaskConfig): super(QuestionAnsweringTask, self).__init__(params) @@ -56,6 +64,14 @@ class QuestionAnsweringTask(base_task.Task): else: self._hub_module = None + if params.validation_data.tokenization == 'WordPiece': + self.squad_lib = squad_lib_wp + elif params.validation_data.tokenization == 'SentencePiece': + self.squad_lib = squad_lib_sp + else: + raise ValueError('Unsupported tokenization method: {}'.format( + params.validation_data.tokenization)) + def build_model(self): if self._hub_module: encoder_network = utils.get_encoder_from_hub(self._hub_module) @@ -85,9 +101,53 @@ class QuestionAnsweringTask(base_task.Task): loss = (tf.reduce_mean(start_loss) + tf.reduce_mean(end_loss)) / 2 return loss + def _preprocess_eval_data(self, params): + eval_examples = self.squad_lib.read_squad_examples( + input_file=params.input_path, + is_training=False, + version_2_with_negative=params.version_2_with_negative) + + temp_file_path = params.input_preprocessed_data_path or '/tmp' + eval_writer = self.squad_lib.FeatureWriter( + filename=os.path.join(temp_file_path, 'eval.tf_record'), + is_training=False) + eval_features = [] + + def _append_feature(feature, is_padding): + if not is_padding: + eval_features.append(feature) + eval_writer.process_feature(feature) + + kwargs = dict( + examples=eval_examples, + tokenizer=tokenization.FullTokenizer( + vocab_file=params.vocab_file, + do_lower_case=params.do_lower_case), + max_seq_length=params.seq_length, + doc_stride=params.doc_stride, + max_query_length=params.query_length, + is_training=False, + output_fn=_append_feature, + batch_size=params.global_batch_size) + if params.tokenization == 'SentencePiece': + # squad_lib_sp requires one more argument 'do_lower_case'. + kwargs['do_lower_case'] = params.do_lower_case + + eval_dataset_size = self.squad_lib.convert_examples_to_features(**kwargs) + eval_writer.close() + + logging.info('***** Evaluation input stats *****') + logging.info(' Num orig examples = %d', len(eval_examples)) + logging.info(' Num split examples = %d', len(eval_features)) + logging.info(' Batch size = %d', params.global_batch_size) + logging.info(' Dataset size = %d', eval_dataset_size) + + return eval_writer.filename, eval_examples, eval_features + def build_inputs(self, params, input_context=None): """Returns tf.data.Dataset for sentence_prediction task.""" if params.input_path == 'dummy': + # Dummy training data for unit test. def dummy_data(_): dummy_ids = tf.zeros((1, params.seq_length), dtype=tf.int32) x = dict( @@ -105,11 +165,17 @@ class QuestionAnsweringTask(base_task.Task): dummy_data, num_parallel_calls=tf.data.experimental.AUTOTUNE) return dataset + if params.is_training: + input_path = params.input_path + else: + input_path, self._eval_examples, self._eval_features = ( + self._preprocess_eval_data(params)) + batch_size = input_context.get_per_replica_batch_size( params.global_batch_size) if input_context else params.global_batch_size # TODO(chendouble): add and use nlp.data.question_answering_dataloader. dataset = input_pipeline.create_squad_dataset( - params.input_path, + input_path, params.seq_length, batch_size, is_training=params.is_training, @@ -141,6 +207,70 @@ class QuestionAnsweringTask(base_task.Task): y_true=labels, # labels has keys 'start_positions' and 'end_positions'. 
y_pred={'start_positions': start_logits, 'end_positions': end_logits}) + def validation_step(self, inputs, model: tf.keras.Model, metrics=None): + features, _ = inputs + unique_ids = features.pop('unique_ids') + model_outputs = self.inference_step(features, model) + start_logits, end_logits = model_outputs + logs = { + self.loss: 0.0, # TODO(lehou): compute the real validation loss. + 'unique_ids': unique_ids, + 'start_logits': start_logits, + 'end_logits': end_logits, + } + return logs + + raw_aggregated_result = collections.namedtuple( + 'RawResult', ['unique_id', 'start_logits', 'end_logits']) + + def aggregate_logs(self, state=None, step_outputs=None): + assert step_outputs is not None, 'Got no logs from self.validation_step.' + if state is None: + state = [] + + for unique_ids, start_logits, end_logits in zip( + step_outputs['unique_ids'], + step_outputs['start_logits'], + step_outputs['end_logits']): + u_ids, s_logits, e_logits = ( + unique_ids.numpy(), start_logits.numpy(), end_logits.numpy()) + if u_ids.size == 1: + u_ids = [u_ids] + s_logits = [s_logits] + e_logits = [e_logits] + for values in zip(u_ids, s_logits, e_logits): + state.append(self.raw_aggregated_result( + unique_id=values[0], + start_logits=values[1].tolist(), + end_logits=values[2].tolist())) + return state + + def reduce_aggregated_logs(self, aggregated_logs): + all_predictions, _, scores_diff = ( + self.squad_lib.postprocess_output( + self._eval_examples, + self._eval_features, + aggregated_logs, + self.task_config.n_best_size, + self.task_config.max_answer_length, + self.task_config.validation_data.do_lower_case, + version_2_with_negative=( + self.task_config.validation_data.version_2_with_negative), + null_score_diff_threshold=( + self.task_config.null_score_diff_threshold), + verbose=False)) + + with tf.io.gfile.GFile( + self.task_config.validation_data.input_path, 'r') as reader: + dataset_json = json.load(reader) + pred_dataset = dataset_json['data'] + if self.task_config.validation_data.version_2_with_negative: + eval_metrics = squad_evaluate_v2_0.evaluate( + pred_dataset, all_predictions, scores_diff) + else: + eval_metrics = squad_evaluate_v1_1.evaluate(pred_dataset, all_predictions) + return eval_metrics + def initialize(self, model): """Load a pretrained checkpoint (if exists) and then train from iter 0.""" ckpt_dir_or_file = self.task_config.init_checkpoint diff --git a/official/nlp/tasks/question_answering_test.py b/official/nlp/tasks/question_answering_test.py index 21d13a030..75e868c30 100644 --- a/official/nlp/tasks/question_answering_test.py +++ b/official/nlp/tasks/question_answering_test.py @@ -14,8 +14,10 @@ # limitations under the License. 
# ============================================================================== """Tests for official.nlp.tasks.question_answering.""" -import functools +import itertools +import json import os +from absl.testing import parameterized import tensorflow as tf from official.nlp.bert import configs @@ -25,30 +27,67 @@ from official.nlp.configs import encoders from official.nlp.tasks import question_answering -class QuestionAnsweringTaskTest(tf.test.TestCase): +class QuestionAnsweringTaskTest(tf.test.TestCase, parameterized.TestCase): def setUp(self): super(QuestionAnsweringTaskTest, self).setUp() self._encoder_config = encoders.TransformerEncoderConfig( vocab_size=30522, num_layers=1) self._train_data_config = bert.QADataConfig( - input_path="dummy", seq_length=128, global_batch_size=1) + input_path="dummy", + seq_length=128, + global_batch_size=1) + + val_data = {"version": "1.1", + "data": [{"paragraphs": [ + {"context": "Sky is blue.", + "qas": [{"question": "What is blue?", "id": "1234", + "answers": [{"text": "Sky", "answer_start": 0}, + {"text": "Sky", "answer_start": 0}, + {"text": "Sky", "answer_start": 0}] + }]}]}]} + self._val_input_path = os.path.join(self.get_temp_dir(), "val_data.json") + with tf.io.gfile.GFile(self._val_input_path, "w") as writer: + writer.write(json.dumps(val_data, indent=4) + "\n") + + self._test_vocab = os.path.join(self.get_temp_dir(), "vocab.txt") + with tf.io.gfile.GFile(self._test_vocab, "w") as writer: + writer.write("[PAD]\n[UNK]\n[CLS]\n[SEP]\n[MASK]\nsky\nis\nblue\n") + + def _get_validation_data_config(self, version_2_with_negative=False): + return bert.QADevDataConfig( + input_path=self._val_input_path, + input_preprocessed_data_path=self.get_temp_dir(), + seq_length=128, + global_batch_size=1, + version_2_with_negative=version_2_with_negative, + vocab_file=self._test_vocab, + tokenization="WordPiece", + do_lower_case=True) def _run_task(self, config): task = question_answering.QuestionAnsweringTask(config) model = task.build_model() metrics = task.build_metrics() + task.initialize(model) - strategy = tf.distribute.get_strategy() - dataset = strategy.experimental_distribute_datasets_from_function( - functools.partial(task.build_inputs, config.train_data)) - - iterator = iter(dataset) + train_dataset = task.build_inputs(config.train_data) + train_iterator = iter(train_dataset) optimizer = tf.keras.optimizers.SGD(lr=0.1) - task.train_step(next(iterator), model, optimizer, metrics=metrics) - task.validation_step(next(iterator), model, metrics=metrics) - - def test_task(self): + task.train_step(next(train_iterator), model, optimizer, metrics=metrics) + + val_dataset = task.build_inputs(config.validation_data) + val_iterator = iter(val_dataset) + logs = task.validation_step(next(val_iterator), model, metrics=metrics) + logs = task.aggregate_logs(step_outputs=logs) + metrics = task.reduce_aggregated_logs(logs) + self.assertIn("final_f1", metrics) + + @parameterized.parameters(itertools.product( + (False, True), + ("WordPiece", "SentencePiece"), + )) + def test_task(self, version_2_with_negative, tokenization): # Saves a checkpoint. 
pretrain_cfg = bert.BertPretrainerConfig( encoder=self._encoder_config, @@ -65,22 +104,16 @@ class QuestionAnsweringTaskTest(tf.test.TestCase): config = question_answering.QuestionAnsweringConfig( init_checkpoint=saved_path, model=self._encoder_config, - train_data=self._train_data_config) - task = question_answering.QuestionAnsweringTask(config) - model = task.build_model() - metrics = task.build_metrics() - dataset = task.build_inputs(config.train_data) - - iterator = iter(dataset) - optimizer = tf.keras.optimizers.SGD(lr=0.1) - task.train_step(next(iterator), model, optimizer, metrics=metrics) - task.validation_step(next(iterator), model, metrics=metrics) - task.initialize(model) + train_data=self._train_data_config, + validation_data=self._get_validation_data_config( + version_2_with_negative)) + self._run_task(config) def test_task_with_fit(self): config = question_answering.QuestionAnsweringConfig( model=self._encoder_config, - train_data=self._train_data_config) + train_data=self._train_data_config, + validation_data=self._get_validation_data_config()) task = question_answering.QuestionAnsweringTask(config) model = task.build_model() model = task.compile_model( @@ -122,7 +155,8 @@ class QuestionAnsweringTaskTest(tf.test.TestCase): config = question_answering.QuestionAnsweringConfig( hub_module_url=hub_module_url, model=self._encoder_config, - train_data=self._train_data_config) + train_data=self._train_data_config, + validation_data=self._get_validation_data_config()) self._run_task(config) -- GitLab From 2284f823892f0b94fe2df8abbe74a10bddc7f32d Mon Sep 17 00:00:00 2001 From: Chen Chen Date: Mon, 29 Jun 2020 09:30:24 -0700 Subject: [PATCH 71/79] Support create fine-tuning data for tagging task. (XTREME's udpos/panx) PiperOrigin-RevId: 318829996 --- official/nlp/data/create_finetuning_data.py | 50 ++- official/nlp/data/tagging_data_lib.py | 347 ++++++++++++++++++++ 2 files changed, 391 insertions(+), 6 deletions(-) create mode 100644 official/nlp/data/tagging_data_lib.py diff --git a/official/nlp/data/create_finetuning_data.py b/official/nlp/data/create_finetuning_data.py index 7a74745fa..1864d796c 100644 --- a/official/nlp/data/create_finetuning_data.py +++ b/official/nlp/data/create_finetuning_data.py @@ -32,14 +32,16 @@ from official.nlp.data import sentence_retrieval_lib from official.nlp.data import squad_lib as squad_lib_wp # sentence-piece tokenizer based squad_lib from official.nlp.data import squad_lib_sp +from official.nlp.data import tagging_data_lib FLAGS = flags.FLAGS +# TODO(chendouble): consider moving each task to its own binary. flags.DEFINE_enum( "fine_tuning_task_type", "classification", - ["classification", "regression", "squad", "retrieval"], + ["classification", "regression", "squad", "retrieval", "tagging"], "The name of the BERT fine tuning task for which data " - "will be generated..") + "will be generated.") # BERT classification specific flags. flags.DEFINE_string( @@ -56,9 +58,6 @@ flags.DEFINE_enum("classification_task_name", "MNLI", "only and for XNLI is all languages combined. Same for " "PAWS-X.") -flags.DEFINE_enum("retrieval_task_name", "bucc", ["bucc", "tatoeba"], - "The name of sentence retrieval task for scoring") - # XNLI task specific flag. flags.DEFINE_string( "xnli_language", "en", @@ -71,6 +70,14 @@ flags.DEFINE_string( "Language of trainig data for PAWS-X task. 
If the value is 'all', the data " "of all languages will be used for training.") +# Retrieva task specific flags +flags.DEFINE_enum("retrieval_task_name", "bucc", ["bucc", "tatoeba"], + "The name of sentence retrieval task for scoring") + +# Tagging task specific flags +flags.DEFINE_enum("tagging_task_name", "panx", ["panx", "udpos"], + "The name of BERT tagging (token classification) task.") + # BERT Squad task specific flags. flags.DEFINE_string( "squad_data_file", None, @@ -284,6 +291,34 @@ def generate_retrieval_dataset(): FLAGS.max_seq_length) +def generate_tagging_dataset(): + """Generates tagging dataset.""" + processors = { + "panx": tagging_data_lib.PanxProcessor, + "udpos": tagging_data_lib.UdposProcessor, + } + task_name = FLAGS.tagging_task_name.lower() + if task_name not in processors: + raise ValueError("Task not found: %s" % task_name) + + if FLAGS.tokenizer_impl == "word_piece": + tokenizer = tokenization.FullTokenizer( + vocab_file=FLAGS.vocab_file, do_lower_case=FLAGS.do_lower_case) + processor_text_fn = tokenization.convert_to_unicode + elif FLAGS.tokenizer_impl == "sentence_piece": + tokenizer = tokenization.FullSentencePieceTokenizer(FLAGS.sp_model_file) + processor_text_fn = functools.partial( + tokenization.preprocess_text, lower=FLAGS.do_lower_case) + else: + raise ValueError("Unsupported tokenizer_impl: %s" % FLAGS.tokenizer_impl) + + processor = processors[task_name]() + return tagging_data_lib.generate_tf_record_from_data_file( + processor, FLAGS.input_data_dir, tokenizer, FLAGS.max_seq_length, + FLAGS.train_data_output_path, FLAGS.eval_data_output_path, + FLAGS.test_data_output_path, processor_text_fn) + + def main(_): if FLAGS.tokenizer_impl == "word_piece": if not FLAGS.vocab_file: @@ -304,8 +339,11 @@ def main(_): input_meta_data = generate_regression_dataset() elif FLAGS.fine_tuning_task_type == "retrieval": input_meta_data = generate_retrieval_dataset() - else: + elif FLAGS.fine_tuning_task_type == "squad": input_meta_data = generate_squad_dataset() + else: + assert FLAGS.fine_tuning_task_type == "tagging" + input_meta_data = generate_tagging_dataset() tf.io.gfile.makedirs(os.path.dirname(FLAGS.meta_data_file_path)) with tf.io.gfile.GFile(FLAGS.meta_data_file_path, "w") as writer: diff --git a/official/nlp/data/tagging_data_lib.py b/official/nlp/data/tagging_data_lib.py new file mode 100644 index 000000000..0372a0e9c --- /dev/null +++ b/official/nlp/data/tagging_data_lib.py @@ -0,0 +1,347 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Library to process data for tagging task such as NER/POS.""" +import collections +import os + +from absl import logging +import tensorflow as tf + +from official.nlp.data import classifier_data_lib + +# A negative label id for the padding label, which will not contribute +# to loss/metrics in training. 
+_PADDING_LABEL_ID = -1 + +# The special unknown token, used to substitute a word which has too many +# subwords after tokenization. +_UNK_TOKEN = "[UNK]" + + +class InputExample(object): + """A single training/test example for token classification.""" + + def __init__(self, sentence_id, words=None, label_ids=None): + """Constructs an InputExample.""" + self.sentence_id = sentence_id + self.words = words if words else [] + self.label_ids = label_ids if label_ids else [] + + def add_word_and_label_id(self, word, label_id): + """Adds word and label_id pair in the example.""" + self.words.append(word) + self.label_ids.append(label_id) + + +def _read_one_file(file_name, label_list): + """Reads one file and returns a list of `InputExample` instances.""" + lines = tf.io.gfile.GFile(file_name, "r").readlines() + examples = [] + label_id_map = {label: i for i, label in enumerate(label_list)} + sentence_id = 0 + example = InputExample(sentence_id=0) + for line in lines: + line = line.strip("\n") + if line: + # The format is: \t
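Taken together, the new pieces in this patch (the `tagging_task_name` flag, the `PanxProcessor`/`UdposProcessor` registry, and `tagging_data_lib.generate_tf_record_from_data_file`) form a small pipeline that `generate_tagging_dataset()` drives from the `create_finetuning_data.py` flags. Below is a minimal sketch of invoking the same pipeline directly from Python; the `official.nlp.bert.tokenization` import path and all file paths are assumptions for illustration and are not part of this patch.

```
from official.nlp.bert import tokenization  # assumed import path, not shown in this patch
from official.nlp.data import tagging_data_lib

# Same tokenizer/processor pair that generate_tagging_dataset() builds for
# --tokenizer_impl=word_piece and --tagging_task_name=panx.
tokenizer = tokenization.FullTokenizer(
    vocab_file="/path/to/vocab.txt", do_lower_case=True)  # placeholder vocab file
processor = tagging_data_lib.PanxProcessor()

# Mirrors the call in generate_tagging_dataset(); every path is a placeholder.
input_meta_data = tagging_data_lib.generate_tf_record_from_data_file(
    processor,
    "/path/to/panx",                  # FLAGS.input_data_dir
    tokenizer,
    128,                              # FLAGS.max_seq_length
    "/tmp/panx_train.tf_record",      # FLAGS.train_data_output_path
    "/tmp/panx_eval.tf_record",       # FLAGS.eval_data_output_path
    "/tmp/panx_test.tf_record",       # FLAGS.test_data_output_path
    tokenization.convert_to_unicode)  # processor_text_fn
```

The returned metadata dict is what `main()` then serializes to `FLAGS.meta_data_file_path`.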

z$oAK9xpec&OQHp@04u0j6AqLSJuQ9)e<3kh&x2l?HdI^XKe7_$Zc#C>f_ z#;rnPPh4Ra2S)*{%Tc(rO39r!5sN~MTdGr8H z6uR@eE{_uVv*bj@HBSYwA#b8(V6EH#LcSikVN>+ioHw?<;bF8UUUgZkDN_7U*k1Q$ z+8GEx@VJ_y`t7e$VX!E}RyrAs0~eNHq{GQSH%QqSBF%sO#ifMCg2-`Sv;TQn9c*g$ z0G5e=Ggqc&@5)Sq?YaSO2p}>t!__=e_rm&AyOLG<%{8JeVK+yeJW>4d=@U)N+kc`~ z3fgFh^cFdo*jf`6D|@k^3Wm=}?P&s)@t{FXCUhdO?N>$OHjTWIU8j8A;8|Dn1~ygZbH+a-4yaT?qfGkC20^L8wXu2Vd&h;hP?lTA;&U!U zKU@nZ53WuXHj+V>-Gx0Digj$r!-`?kqHT$6POEEz@)jWBl>i^^Txz@Ee-J22al`Si-ePvw|*Cs0Cn%T-q zy7uT=7lkga?3q2HBH1&kgrr<)7)2#f-{0r=4|qIq&iQ=aulMWqd|gfoEruF@LpjPo zU)4p1#-{+wy4FePntRFX#DqRvjm~AWy%7ylH9IpRhHgz%x^U zZd*Zk99ZrQA;orrH|;Ne)NCF@Z5$CjTd0f~Zr25FOX+f&=(9GU8^{ zLSKa@jre09^;Auad3>~64JJ`Db%?M>idn<&vX4uTMToh@{Qp7tRdv+MPp9b+_xQF& z_KAxZI;Qs#FF3L)@eHtvv;dc+MNN8L{~yTWr;-dzS%xoqh9UKLOnYW2A-Q=(trt>O zS}Tl<8S0^+o+iCzMp8UNdK|uMGx5Z$M?>hz2B2esFC7-G@15L2BhM`EXntr42)n4m ziziod7Ezf471`%hSQDQ#Z2K^MqVqGjj4nHQI;xAFm= z9h4E13x%^Rq&AcfM3d~=(U0Bs_OPJa8t~#r9%t@rXi6c^%A1ZqROnfgnb-tNb~Rdm zV?iGyN8EZ&dc#y5r zi~1-9WWZ3UwB**sfcr)h2}+~DIw^D8Xtwhq#H?07;b}`$ilWjfR9lsa@yzPhW1OY~ zWu;b=cC3MLb9K~*Kp0b`Z|2mjqA<6(Hl1GUz1o~6$cSyTOZ`U*KBI>ZDs zim7w-MH=M3h|r|6DO01o4fpv6|AS6uN;nW1#`gZdX4DL9R~C9G^Ym!0fuyM0j^)Cy zMrRnU3#Ay#H(S(!*f{6I9PSxC6E8qklGLKu(cCnTO0Fi8LC>BGw?~d)Si#dIc~t0tk>=0Z>(Wn`MuSxD^@@SJ ze8|MBZ}I^(lU4>+`Ec($ss&4Iu;Wiq;bCrzrD~QxCS&BwP|9U+hQ;ih(qvq?b)#q6 zffc?aTv8u84seS+=Ze(Hjy27GY2GUlrh93Bt^*to@q%z+fgR!dtQykB>5YAnov2eY zBXrJl-(+ndYxSqRzf$Wa%w>aNj^W$L6!{r^_rynr1sjRVq?}04Zy*nxJNl*~Vr*^m ziFrbF8m(o?#1Wo{J*k~Me78`bD4)*YtNr_sjS4Bur1-yl*u@QJ0@+Te0*`obWuz=% z`DC=j<3T6S&;OoR%gVB=!~8V= zco3zRhc-_*dL&8(3!r5rbf2}hzAG5mUciHg`_d&WgR|WtvJd0c;@mFdHcTzRra=*<{4gm=FeHL<@Z+qn*#7Oc;3cLTCn2YiNG-0r zkL0*#lvHq3hFqPOixtA2r3<+^2@Y}x@c%1AIpZ6q8+YFxV|kAMepab(9D;{#C@&G2 z*nSOPXOPcL3WyL8nQwG*;EV_n)T;!JT!^4(*wF5w@I-ots|t7i#7vxr61Y~&dDxM7 z_S*#*Zpuv9d{L=aZrO(a|M;b67eok$uHHE(smV45;;roc`@AC@xg%T7g()<F8&yAh(`TS3hariiDzXv%l%NjZhl(%G2ud*W0C-w6-sk2bT9Bh>Z?52$?e9)n#{2}hin)7Q`vrFpd)cyUM9ag}aPc zgf-44h3;*O#B~O7t-1(s+_^lZl>udLzcRz~OmA)?}-P*j-Rkc15*Omj3*~y$9ej+}@b7&XdkH`2@b< z$|);%*8XaGi5;^oD8|tNz&WR111C?v&kl;JE83853}IF0Y6j$5W#8)TQ~e|J*iSNN zcu4g^$&3A^$9P7MECahm7yV6&-5G>M5Wc@ORSl*lW^ON@PemaKn3~a`u)&=Zs0Wh( zq;^|7YIZh6Nx!e`d&R_ybxq!x(FZG_Y-5hDX}XwPDRAYS=Kc8{4U;V9hk1ML0xNot zJ78`urXO=O+9=#j3%7pOiLyxaED9L8fy;Z|axv5p9$b+*2{qMYP%U48?VH10GilGciqyM1n%s1}j@C?5NE6EJzD&5w}8|9o0jk#L$14~7$~V6i|7@C;FKl^ zF(?^6i>&5nQt6pJ7YB{LJ@6bK1Q!#wOg_*HL}c92j$Pw&`?Qb0{3 zQh}71EYkYtMM1Z;c+u~XEl++ZI=+%~WxZAks2##N2zCvTP~b1Nn(RriIu~l2*uhg; zQ>P1F?p~qTr`lXkD~_ITZ&|sQ2>kLY7+&Z#k8s;h*iFHY<;h z>V#rB)|y{|zR9wSHiK4Z7?z#J&I|Owr(~?K=L(@1DnNv=xRMQk*|bTvdh>8>8K7I= zT$yI%7a_WvTKY0X$GMG2)7fzdM~|4+2?+I>_vI6S1_03?*ojGrn9>y{B_`NMZU6P9 zGj-1~hmA@hs*Bi(TLVH-y1%I`>lV`D-H{5f3ZjJ`(!T=&`WGs1qELI@A3$>KMNE(~ zAP3_TgcRqE%_a%?4~E^7c9^k|FkeVA{t!3YtjYg7&OwS}KO_qDe)%xmC#+>zb)Fq! 
z5wN5KenK%HXV#KON}(e%!x=;kcY_}wmtUCmcdI;0@0W6!zXV!udI&Lg0P-LvG)5s?n+!Bi)MSRtMh!bW%$YdM4DBF{xg=sw##pp!nbazUci3OLRQl@g z>C7U#@9}e`KVbEZm(K|-kc(JNt?hMk6nTvhS1hgpz>rg~L(1@n>{K24Pjd3lRGQ7; zwWwZTGgb6Se$(=H;pent4>6rUpM>sG1{C453bqI87IAh)9{vU8Qb*@Zj;4C2A;&gR z-&7U4NH=0s6NkBn;wsvDVu$0CFI`_pnO~-TSbc>~i1GQeJ(nYii4@sAX*$4m9ixwF z)wSxBpX$xLghQ@_KeFOm8M1e-$=V&7+X326>slax(^VNn)W0jW{4o1KvdpcQsSaB) zK1{o(_q_!-(aWDtHx_`qblix(6R{YH) z9F;@6_s)msMMC8J_4O>`f0u$Z$3d8jTY)!^N#9;60h@7IJ{xV=CLo`dB;Qa1fvX(cxp`DIO-~B0Rp$?kqz#K zwl0=&sio><`|&tS6I!r8)Hs0NdH=MuXV}P4VRR>7RF43TLcLj`Uh+yN^;FLHL`$-i zG3Pt2Cz#9Xz?(dHKh|d>PHQgtehyQ>h+TqdtSA8R_CRBU%8AEc8cQ#Z4Ww`^uM+uY z^W;+4JvSemZgd%oo7Ew6Mnd{0A+y0dD1IN&D>GK}lqYAW`zgG0>)&a3M!TGJ_FLj1 zN+y~$nX>@h(iO!I3$du4Hs_v1Zi;-9)yB77DpJyb7B4?biHABJTs#Dk>>S!r=~7!(-BIC!O<2x5d3%W7-x zZRr2$#%w-vDP54CPdq>Y5gVV+r)_{;z#SCB6C}1yK&=62c@FqUi{kTBWLpBU_|IkPDkM#CL(iJ)4M69 zwuC2HvIQN_$F_rD?A$y*sf-TRqJmgF1-5eYs^Ql2(J+_#z>(*GKdb%bq%%K#Z~WM{ znc^X0Yyi>%DEth~!>7$$7Mvfda}Afw;S+BxJiEblBFSBc-TJZb4C1ENOS>=CVOZ{++%O}HkXx*szF*a@CM`Ll zz)Zc!9!DYkyY{pkX~`54j^$SSD!QAW0|w?=smPVwmh8Xp0emWtep9}2r8GS6XOlFq zMB}2Sdn*n+ToTF0i`Hca(X+askbK`cqHAt655Qi8@K^lxR|2Hvz+kpPE_KggVG9A3 z!ljGSnU@^pS%k|zd^V@MujK|s>jUk%W+#%$;G?P6Q9I4Rm<7|^d_@!q-S}Jlh8eTm zf77qpn1}L>>#{T~Q{ugIv@8ZyEh8b5+xj>lF-KnuiaU<(xda-^hf4G3j_lycuK|3H zUXTZM*ix|ah|AK?M@8Ovf@3)X5TIec(+s@k>;9ueV&t@$TAjvD#^1THo61;zD*Lb` zML6ZDdbIu(|CafQ_H@_8Q?grJZ-K}ixWw_D@=>MVv$vKZV9LDkrBTkdhL^+=ijyT6P^us{Dhhfs0??IlDsLx`V%YP>>}DQX@NR$W@7YISM8FDREP8s@1l`eepbSfe-a{n9xz(}_70cjV8ZxKI^ zWm@0jQo}RDBWc}eMqDQ zf><7Bm&wB-ejN8^2z|BeLa3UmIX^9Ra2&j(_p1m}!C|szGcT?kmqn}QF3d6D+aIkP zrbxpTw!t{k)bCOipoqc8%rWV{UGDd~1n8yfFP0wowYK0l8`QD7JyiJQsRH48Gs(N& zx|$#JEW1fVs%NI0lcK!VBBvpxl#C_k40a}APFOX!)a~iZ4pw(XL8O|etum5l>U-B4 zK)c3k)?8_}P*MNJ%2CnmOUGi(;6Jf1gQC=~%`2i$p|8qiMdASES5Tp*Xvy#N0FF=J zM2EGix4(N$DSQ%7FYv2J1=1X`IxD%Az5zRBJ>yH)fX+f2ioThDj0B@J}pAoG?qnn~S;9JreB>q1#EV1FuKU$mvX60=}b_VA6Y zR!$TUDcSQfD|CD?qmT6T%H+asGWo)$RM|AF4{cRWc(N02?D@jlxZgPFXqc!yc2LdB ziE+C|WxfYT>4*;CQJhgW%sTWSWTj|r@_H)Ct9{YVCWu|BSBm7*T?OfnXUM6m^%fW}{3i z(A5SD!FC;5_-=6-`&94eH}^o}X@DWOWtfUIMLC;=LO(_is<;G94HvnFUR7o&ZI#2& zM-f!I$W;ro0(-F_x{BK$lFrqC$ijgNyE!hYqJ>2#gSE(S&1rUYc^c+ zN|(TIdL2aPzm%U~Bb`BvUc@Su4d=vc3ga0D9S9ytUY1d{diavX#4P;+2&2pZ%OUo{R90~KoXC9fTXyqLzLr!n_f-S4>U&~Bmy&`hBHpd{-(zT-Gc+@Kp zGaT0DQcB~u1-Tp<{C#c}=PW0mCb3{e_Y^OwtZ06idi z>*6k!6NOmK6(#nW zaD(i+kG_nQ_e)W*0X@f=kgFrWugkXO=-Fr;y}HoG4ZG_DOnTQAVWp>+R1!aq zT70N}==AE-tQ@IjDFZBP5r^HUHKd^gf8#4}+b5zx0E(3?lM~yQpMV*?=$_%M=)okx z-?ITm?0RbZ)N)wTIpah)k@cir5e(T%1{8n|J+=2EvVB z)*D_WPZ_v{!T+ywSPNO;R!$NQf3E|;HqzK5_z9KAPOujbF5Y=bRHy#^FnsGGlyIGdbG@#jbnI`~ zh8vS>-x!K1hOUY0jc!P5vjin}UO2hcm%Dc^tp~_v<Bq6=&!U1rNebiun24-(>OV!&kAMdX6$1HM5Y|Rx99ZTm1q80>4rAS^}4?D zD3u?&*k6S>PvLrq|9LE(e9>8HW=i-62n3?*Z=YUptBTUpdn6l0K=BBGn*b2)ap*m- zTs|_2>v!=?$&QUA%Q1mt6FAj(U{AZ)Oz76GNj-9jJ{cwGG`m}laR1T)qcgwDBVW5H z`%dM+HX|cyVnu2)<#HGXeG&?H`FL5qBN{H6Qd9DVK2WmQHc6BSC^xWP+_&bcM9$k| z7;5{t(C~mv0zY7zwV;*L(RUA0KLSbYh659FBRogM3>^W;9UTX_4TZeaaVZhs+^)+# z%n>~UVn5ViLF$Lh?m3_Bf+Vj$PIJp=Yi-5yT;gT=uxe5HLEe+7z3^&pax)kqyV9kN zXH0l%kQEa1-SD)8TSK3e4gednY}lhx?SczAdQaaE=g%#VLa#4)s=+)~I>PEZZau^V z7yf{EUXbfh(FxtQ;lyn6Lq9#f$IpDz{_YSF+?AOM#yQg<`%T650^9D>bfatYC2 z7x9ca%4v7$but1l=>35);J3n|40se(wANRtJ;e5n(C%ii+;pktkPl5b;s0Q-n(o-E z1vYxQ1IA~OlYXK+hqT#g@gfNOL>dKDzdG(5?j04)fx6h%jU>N#T`b6`w`{Y?F}^p! 
zjYw0v&I=x5XS6^7r4r_y;iS0UPm4Q8%{r@G$3Ae~J*xvI+Eq0^4=um9+IM%1(lo#LaRLE;Z)o5ewsF5#i#$Rm<2agWglE%(KlZ3Q}_=*ghvfG63h8L3hGALOHxL5|kxJMv@F zbHImaC4wATBwILU5OpKOPxyTtL1&EVG-ghPCnF5I#LN%1js!Fnco?FS_J20&;ve6NG0UB` z%!e`HE($nC1hjn76n$AO$&Fd!#NS-`33jeXT9^po!WymFB=llLs05PT@UQ>wXutuL z{e`v_1KEucltZl3>)1#2k2Tw(q<1t$7$M76q*zaU+Ra91c+VGE1tTp3U&DV}rg^!b z60w3W+8yn6J;*?SD_`{yVMHr;jLN3C9eCe_!0|A0b=XwgV4%Lru7clfK$LkMh#sT{ ze(`0*8sT~sS^8xH@9-p~5yAnv%4UcEy3Pk}G8Xi#FMp#h?W8|%dEP5;wss1rF-hsc zK}3_RBd*nm5xv5u+nUMS86Exuq=s-TPkZNpuN@PLiem75@vT}G)_qC_g#?And#2Yc ze_qL7Q~DBD(W&Wm!$&)Xrpxk08j@7Rx51uNa~ax`!k$fkWKUu6sR3-{bo4_zR(`|) zaPiw7{d$>q4Q$nJ@R6TC(w`5T4FQJJssSre*qDLq1}^4?xCF@}!Q9 zKwnMhe!>~t2;fY(t-ZL$$@M*kHH>@sQ7;OBsqCIeM#L}ts#H-W?4U%<0Cd_=U-wC> zR254+$&ITT(8Hu>XHJL_qsptIiDu<5+}BUDqVoc>gsE??4VC9 zKW<0DkPf#l@~D;$@p=Qk|7%!FIq7Zt$Dk;E@S$ie55CtkYiq@^&pV(2_S{;G0ppni zKR;4^f8|j&=Rh(kXWOv9l+lRO(mBnBOC*}g()5igP0K3cH?Q&CD~w$M-KY$>Q%DT7 z|GKy^$5GO33vg65!ijKUH~hUadRERG8vf9=+Sr`rtPd`tL!C<85JR@%E|zO$(fTR% zuP@vaje0KDrH-? z{vL>a4F0puX6-!m^mu@>kLC3n+YYfG_Zs1vdzIO8rFA^u_|QnsXKs$sSz+HxvKKm3 zq1})XwtINt*}FH%%@H3?qrOpDU+tq`&A(MCQzn|i%aLm@Yr7gA59RQ$WgX+`C$_r1 zZO4V8f7aj(FP>|B5}I-he5m%f{k*tKFLiBp8hwAT`0L3S%Rf65-qHs zl~%F3uK!kn_Ovnrh+TLt6|DUYLY{5o4M%YS$3zfo%|yQIY+DOs6!eK*UQ}Ar;I#tU zj+6e*1UWK#6uHB7)+_$%rBpHbWHY_gVklcdAtPt-BAtQW!t(j1{d4c*@L?Aqmp=A+ zSFsU9Z2wElhN@G4^vdT0B9+C*&qjbrVRQG4?u82gnWMf=9|Gj}^VzEI7<6cHikK&^ zoszut&J~pHhGcvb&x04}8jJVsmGcx%;;dM$B#7t>4czEGE5q|T?&aON@90Bi;Z4w) zF~dM_y~hhJm<;ztw=7Oyy53&Xhitu}bF@>NHfDQZ6W~r^_a>f-dTg=Px5p+dSbZlS z!ZJ{P>`2ooOAzfE=&Zi8?>ofyl&5Lp3GM^Bpmg0baQM-~k=h*%o^9(Xce1?FN(~8Y z!d%3qYpL#*5DLM+d@tEKA$c4GO)tkZ$b^|V(HqVlfmQBD;czTh%H}ntRM&Q8+wBap z7oSN5c+;BpaMV55Fq!>{6O#%hjiF^RP{K!_tXIwn+u^d1?ya$E*Y{7S5XnJ>I)pg} z_>^q@bH2pAcJ}NhD7cHw`} z4XLJ!thq!x-li9!ABc#+r$md5OON1mHSO`wcFfSQ49JCK#NOB#5~BRo-t>>rTFRO1 zeq|sUeZQ|iy^mWVYQetRPW8Ol4kgjrZ~D^p=Se@MLmWsHH-n9BhfOM#O+$Gn`ZS{v zHt0Vn-cA`e%Xx6CP+fV;#FuWxU$Q&6@OYf`_kvz%m&_Y0dAc>wQsvo`veGUZQbAu^ ze?Ray{cF7+FeEE_QN@h*v1zg|Y#Ri-tbvPl_(Tb>u}Xq1iN_X|l;_W%&OMVDpphfDr~DojKDg3n z(JP~qqvn2Z8p+OdOA8{OQm%%Y`3b=@{tFf!riyV_HnBW+E)13tqt~w0uJ65Yvlnaq z3g%bs+Q3a;D5kW4$LuoB=!WMPC!SZJIrE<%BN3L3nKq5ts?8q*LtmQI~%M-*&Ytuhf`G25ZC{S3y(bU$UB120i~&?F$dD@7b`3u)8s;8EWgPrf_s?;uM6czbAHuxFG|LmkgPT;M4 zaR3%(X1$xEGu!9)9``;jp=w6bi&Z^~0zS`yE&n_OgcVJ0gj?LEv6l-Be$obBf)_Vx z?M5F+W%M46U+W374)M&)Z3j#BMsKfl{l{9QR|U0hhRVyra=qsfcS}_FMOMX0)=+Pff(;_hhDCXj+(KB%~>hgq>qTLJ|Ku z7o5}HGvO@V!v9|wb@yyMD~?>AA>J@%AQV8jIEQ^9X)c5DQ-A%7#BG&*54fX+NidMPe$y&7D@LcmHTR)b z@tn*=Jt=`@Kn;2!Y@;!K+TauP)j{DwzU0IRIog+=KvZ7zAd02<8aP~0QV)qnj_*2f zuH;9^NquVtlZoo&P{24GGw>nrqnRP-_W)Cu$@s?m`(oUhA1X4u3)Kkk(}2=t{n0d< zdv=2F<7_jRc^1E3Txi)nv?utteY7jSQRGCo!UMpQ);T5?nG#CBn24u>=x00eZkuGy zv+~as!<4i=F;qxa$fE*7UoIf{Va$-P<0sfmu{H$D#y;;my+v71v|;J4H;uT5MzMbV z|E4?vcP~uOF8eNUqoS_+>@Buwlz%>)qi5c7<<%ms|DZCmn_7;Rc0jKTHU-$VB6MX? 
za_vDUB1|;_ijH=|M*q=lP<~t8iivV5c#4<{!h3N817a`eq0B@gmn$$6w6!=ht*m{w zl5BmFz`v(G#~`*rw7M6%Ci;Og-9dT+d<5ifRwmQ9sM^+98zIEnbt<$e%+2x%S515i zIm}*pN<4uU$LubCNV<}vsf!6HhP>CDd{RVG&&d?|7*Yx}0YYp`6G0PIkDt%;lR|#F z@dG3R3UK0Sur4slkr4ljd5&X)Zjv_lCGAUH<8D3aan<^o&42gtxz%@-pUWHg$ay}Z=&K2 z-m~m0J+YXhGgUHUxza~*dzp&oo3ZYpUw#CsoE3Lrw$xgGWRgp15l;j3QH!MSdm&(M z==jF+r3jukqYnJEDr>y`>%AX}g|70Nn7V@*IShhdW&eTvY-`HL^g-|c?YJpNy(cmu3*ki{i=9V|B zwwpiRv7QJ}8wPUK7E$tM3?CW|Pme4u+L+ur332`Dd9gqX-8Bni+OBjTUy+6lp2wPK zaXowCM$(CocORKkCE*#q7+h!_FQfls89j;-{sOtDDOt0toH-Ri%05;{(Ym;y|UK5t$M4`esS?jsLc@TZR6W$7Ql*wthHL(<@b1vW#6cy%_k2L}0XGHEBQ_wR>nNzALzRwdXQYL@yBJtk*N3 zg!#4eA-TMLUJP#6_$U2NOotvQ(E5`9W?p{) z8ZNa>*p6SH8MQ1khl)3Ow)RyFK4;ypbbBDuU}X8UMi`{15lEvM*a&RwUnk$nC~MHA zH()k;dUm#e8ESdk^RkfjW}TZGTQV?7UW6KI;l&3xBp#ZMyjUsqn`^J%MhpZMegYrb zZqCJZRZE^LnbguBW9vs3mRfJ+h|Ih*hZ-k>e@2!(wuiIcATp{UCH%x}?+j{ZTX+-C z2BKw7ro*QZ5as5MoWSWL;Le%~D;^M; z#ChPDb*UzRrzw*P7HYY{3S~Lu($c$<*5zBuw?s$?UhS6Yw30RZc96tOZLC= zcHP7vs+Y!j&d`k)+$!S#X}f}lbY6zBNz+q)irKXp^iHxcBR1(lCxbxREd|1uE^nBAe%8y-DAt8NGanEKNoeq$0L_M2r=V3d?Z=D_@FOwU6eQoMd~R zDzC7hG>5NrbpKr;-AXrs04BPQH(t$9{M4(u^L+JNqhnmw%bn20&e$v&!ALP#YH&7?eoUcs@+*o!tP;aRK|RP9&z(s;qcEk$=*wSVQ~lZL?nON zcO|zWSgPv*zeVIo6-7~th*g8?I?4*}?uIAMzQ16+LTWCPr72p1%JXTha4(EmexWe8 zRZwTd`^!r$m(NYHs%(Qe5D6PwwXhySwsVS=^fP#zi}zLUhGi?TE6&`rnOq zPPQsYvcr6XWz9Twui8n@D87;{6|mslvFzQP-Ho%y;h|*)OBIypz)l0U{&8`sGB;_6 zal`gwPN5hHP(9VpmD-F{cFhBlQhRXx3Ca_dwyn_LFK;I}Fu3o#D}y^%Fy~@-9ZE{* zCZpt}Dj&Z>D{kY_1<1~cl>2adD0lEPFl2ld8%7SXm^eS~p|bwksbM6DHSCBrYrXvK z$~%MmNK_3f&+Y0D4`2mGj~V~zrqRMNa#!ruq3QEf>)*hZX(1SmLg!e?4wZ`xpU9<0 z2e#)bwVQ7Ypnp|4)k++0EYxmKL7Ay4zN0WE4FDM#DrmBV60W-MzX}=q%uc(Kl-b;l6lsBL1G z1v+sOPOuzh-oqY!db9ZiS*s{OwvQI)trM9`ECa9IR8FrL0|)pDOPyQ*vqp$Qs^cdG?JrS9Z@i3dS7^a5U*d9IX8=-d=Z8*Pj2l}4 zc&X+E-6X4egFc z%y-~?lj$b-*@X>q(R3iV4dGX=01GbHyC0llt>fLQ=j&2z~P2ID3Mzkcxg(4L?Mt zVk>kH2WvjRR`7Z3=y26f2reAIthWG*`6r_@cA38L&FBo5f7S+y{JwXRPNgITT;c2D zl1PA4DqN~H|E6NG&0-i$R2TA*4AYf{#J3+5i`0zguX$F`Va{p~(mp?6GQ>>@WPuSU z>($^zrSG6v=oLHmLw#jb*`)UfxSUYH=ZJg9j3>j5doUILxwwFZaZ#4YhX96Q=HV9A zlo!Um!nySaBn+CX70{|E=t1h>?&5-Hn3Wv2B3 z(}rUebY9qPg-BPCz|7CvAeXha?gy^syaba0&Kumi{u+mTs10^)0I$8LVn?Du5WGhC z1$&g&{y9pQk`RU;x+J%>LNBNe={aml@uxk%M5D@_Ho*g`3dbtV!;ZCQP~Xt}L?{}T zjR&!5+R!rs-@+fe8~KWKL2P9sMDEr!Xn^w5dPbHITcE?9rTU7@xaJWA?O1TL=&Irc z?T|Ys60xjKhZOg$=vj#b42`3A2PiGr#$JSwm1WF-5XB~Vl8n-q9`wYrLMtlJ`fJVd z0W|Ludw@-1QnxGMIOv^?@qh-7I8#9v1^Sj;kk?EM!S+PrEE2&G0Y>({5dQeRy)eJV z;{p^t6RyLb?=kjz8uz1|Rhv5wwh2tY$5i;-#cs&YZDGAX!a}cl$vtVXqM$tKl zZB+a`@f80%IZjg%*w^0+Z}6=09?4FDDxvJ_moTH^okbJh3x{ zR(iw;Pz;gBe=kmqz(}ORsC;pME0O;Y-{P$C&vrCU71rtsRY4b8J@cm0o$3E>=+D&n z{^E@{5dthO^}eFW^9Z*ad5#K38K#smXRFE1S~_2a04s#RDV?x%ooCLx$rmIXZWHGufJS zD;cs(s>hwU!$x`)J9YAFsB7lEm_5K9_$58-|2T8LHek=tq*gukE9fFENHk=iirPZC zbI(N&?Clv-wElxKp^J8V_P3QM+1*$fR|kbnUL06P={^1dlbcnu?%ui2($PD35W-PA{a~E|&dKW;dXS091g-Jt4t2{Df@z zT%cHSaXR-mRUXB!HmWB1>@zPy#fE{bOZOV=**>X z6fc3nJ=*}d7QHFwg<}N|@9BV+>C>6OJd_D!L_Yut7A zu8HaN6S<3j0GEI9*Wp~@f~pcacd0yW_objQBv1H<1%Cb0(dfv0Zo|^j3Tn-;?6ONy zv*&VRGCtZLB2{)jo7L{Px*)9sbFgE(LDy1Ne7WfV9`X}nfk)J`qGW{?Cf~j0NOlk6 znXo@m%4LN;$X|_i-9|k*^e?k+W8mdB2>N*q=xl=KHHD2))DNkTVs_7= z<@?u!Xo(^0;8SN)(7*UHBj3wa@oH!;q3hflHwtis=+C~~kuSf_+J<_pJb%Bp)A_jf zWyT-Rgp)*3i=gDi*il)3?#N*pjcDq+k7dqb4>Q<2r4egCJ&-JmaR>b`7|CuKA)MH+ zgW6_;09jtseJYETC!IuBTnMx1OAnUysFFhjfUC5N3?ugRt+y=$$+&puFfLd$6~3e_ z>#;nG`BfFQenx%@hh_#vHBgG2)8JBw#<$9a9BMp|>qc7?SPro-+ucDRMnyZGWA@&> z_~?KbebEF-`r4)&&{O5;VWlrCR7XVl-%am-S1 zUB47{o*mnS6KJqnw`1YGO=;J{3~Ytls88Nbx2{-RZqeMuvdQ~XY8?XBX&-K1J4D|` z`+T(cx!wt#7-KEf6ywQgt#Q5O`$kBnjqFx9zN3m|;NG6XEWwsXza7obSOi$#g@pim 
z?GG5obLmeFFQB}?GLxv$qq)LaaE)T)$!Rz4pm8c^5T3(A>3sCeVcDPy)!q;JfXVdF z6G~Q59eV#Xsb3uOrh$x~T< zbFXPYq6tKyFr9`}3ok@`HUNOC7=ttZEoK~;!B>7&u1pNJF(U+Q<~12?f-Y1(umE;T z+^eri1QK2^KB?? zDrB`|P?@7-)ej{GTO~?%nda<%E@dqpDPt#0w*1>t9)}J@K0yyVpWPGEbkBU6`X6NK zBR*NaRB;P~`AC#BQTmi)!tze7=KIIuMDKE8)?{ij9W4@gzAv?ohCEe*Etx1U;Cawi zXlaPujWbmQw1eXe+u9OH{k*z2{cUxYO~(K2PMpj70Sy->dF3)>|NDe2r(>EJ@eH^I z2hmM!7FUde?Z{0T_0f~@LNuRvkegmPB$0%r+!z0;eR{(gNvA1P3uDvF_I~?`>muI1 zIk9#Ot3mjGH(|Rp6totdh!FN}sIQ+QFxr7yEm1d@lw5lDHeMo#$2y2HcKQT1x6=f_ zW-&>_yc2oHPrwp+MmtYQ7`3?H4Ub(E`TMsProi8HY+Q6DtOd;)8!VQyE=Csu)FZTa z#M^}*3R(UNg4lM~X9ld#l_$Z3cHWu6Oum_fL_LlF;@|xyZf?NbWrE?Z`)i=-p8Xn~ zRTaXi845AZlOaPF?k-%|f@Uc>!Il@PB4i=+1}hDwKP=Un?d z`q_DwW195w0}C^JR|duLlONVzeMRz_hyEFN_+OU@y_akaOK)2XF5M$PP9Y-Y8oRJl zWy;!?+IDvm1Hwo6owd_q{oBY84YTl#z3bUoMgN6`6e_mw>D8SmEg zHjcp4VVbjIq(G2^r(QNrwO#PC@;5{J^hB%&zjgh?3dKCFvSWl5e#2GZ1H0)|hBjc9 zrd5!M-DwuvtTj7A+6Jq^_J0tJA9iM%8?ZV+zu|?MY{&1Uk<+R&K?2S$i6qf#B5DKa3gH;Ct+%Z&G4Y}3 z>X{kaSBWUHE!ln?-ojMy#`!Kp;!LCf9!|!PUa+n%7k{lq9?MCwjAy`jjf@_CZ=njo z)~?l)&&_wO&1gz;0!3YAVnkVFK2`sn@JU`3!uu?ta^VaJzdo_|A7nVjp+PupBs1}Au;QID!?R&<#FdTpsXRZ(nXPYsiKeo09rAXg_TaLt@ z!Si@`%F5!TE9C+5?f(PR5P{f6xyEo%(?EwgbOGz?B(@w&r|^NbeH#H?*Y}}-jn2rm z$4F`I36Wy8Nq{_xBlfrTRP6y9M(yB#W8n! zXflrLpvJNCUVy062yWCoAYj#5R@+mP%GjIP_ExqTP?1~aJ!um}m+2G#^vXL*_UdleXOSSFrLF`Srx5UBiSzGDN;R`p>{?F6hd4m!EzlR9e_oUm1Pr-Gv zA8IdHXW#jF_O|b0v@J*tgpa1Lr-=;mLo9>SS2~`iJ$8rhUVGlqD?gkLfb_hkbO8~cmWjeS{w&wUGrPVM znK!b*B7;{ulFXw^Yp&(AXBoWpuk#{bPse`H(-Ezao}-4(zvmdQ>g&GmFSa=~B_@#- zBKxa^A!7UE{bcX=-@>Hnv?qDJZ%M2fiywBCekcoOc;{B^QpcvIL;VA6)$gqJY!k{~ zT4@T~6BTH;R8GUP-F#u+Bu1|h(6#pH&Ck{tKBJ>SMoz0<-Tw!G%=n*pC0z)KJhf&o z+*-RZ8FJEjkXOS$D9gv6U(CMII(faqAl#FOez$Z6N9NAT)-Nj(6)!p~4iEJ_A;|E^miXikb!gS_h4 zE1uxCbeLBe(31%%`uCIncKrv1T=@|-X8*4P=8a&#D~b<+mv5W>kE1gWhw^>f_%kyY z`xZ4A3}YwRveO_8$(EhUz7%3)O&UZc#*8gAF|uT<$R3q_-$G?i4BC(_N%ej2-}}Fg zgJz!lx$g5i&(A4#_2Kh{ZFZw8N6SQMxfHgNy!ADP&~yF+5}g_d&?5%kBzpBa*o#kd zyLeUYo>mj{O)2+m=e+?kYc)l7K1d8cKtA!xDXrAieVwi41&tFAn8&P*6J{9Js)0St z>R*aYSD06%quK>P4H}_n!kX6Dqs!&;KH2cMoaSm9HBJp?!Z8%UTi_ao<_b}@=~jP1 ziwJjjgM)l_pEvmr_0KeBpW$7!dY3E@O_Fo|cflS^igOWnNOGKeUnocBH_+u=kb%MR zgHKyG3tJ~oqZi%67(?u^Fn|Fr6t%gzO-rbFJGDesHDucHPz=PC@!wP0#Nm?Qb;LG}nfsV@w)j$=7v^BUO8u+D`?D(*7@kEU&xOA#k_Z^_ zjJb2J+K%RDPUH@awU^H;87ob^L9s~Vx2T}(4M=a(m|(lU80{UU=!#W4H;$U0%*?wTABImd!?XPI4mcQGD#Qi=m~pDkK7`oq37=iu%L=f@wr zr;=iT<;ZCVXV-wAgW)hO2oX|7Ll*_LD=q_T3W{zL+RR%?;EWNhL$S!KnkWsbzxnVK zGLV14!iSM`3g6_4X%IgQNjC1#qDQQ#K~Fe>@wzW4`N`b1eS&g#vUYR+@!)vO2z3@m z@tr?7%yXT&Yhi#4vRa(|`ohPY-hd6}9B=t1R)bL)c;DFisRO*Uc+sni&)MK%RFVII z@7#w?G;JvRm4df_pwC)<;g6RAB)933YkD<854>l@l2vWLin%bu0#%P=zktAdsBC>p zEq?T@>Gk5TvJwf(68Cdcj!SQOrH;dm*o4eH7FnEOwvXbgeg|Ir>HOYwanVEP4ZAf{ zEDLt7jllI%Lv|mO?#B5|-vtE^H^6augTY{ga*pQrfP<1+w6Pvez|B!e(_60G7jITBo~979O$_wTL)x2?y}xojT9D%tk87K` zM1+PJ&=EAIE0HE6*o@OmCLvn%rSS6Adufk`T;;NJtAh1RpF*b9sw@f$%VfrOhJXBO z@lSImw4mHtic-dxCOc&F#*|5)h@7k!S!UCRT zmQ>>v1%bx+!9*Lm*6z=hr~;F(P>xbh7Wo{b5D~jqAz(0;JJY`u^bI%}D8X6W)$?Zz zm78X1MiLkLS*e3&=lw`_g|9KcvA~cidyl(u%22 zJF%Agc`k$qhwPMtsWJ>+Kq>j`yiAeo8|EZLk3cf9IVwsxnqk0VEE;Efs~v?78tWW&(@rO z6A7S~%^BkM$(x~xJ<1ZSgr#^AF12;d*v+)Wx~241|X`gmg0=QB@rA|DpE8LZ;L29>GaJH$lkX4v^%&{S0cz$Z7$ zVJ9LZgtZHQl<}=R7p2gUJbCcO<(PalXr+68p`aedxyZx}ac2Dq3~4!x(&~Yn+7&sO z!oH;7+CN1fjDeNdor6?oBiT#Jf4?5v{B#FX;Fu4&2a&9PtUtGu^}0?DnL+i+zy??E z&TGM`%nH$v--kpT%dV-$$W*~?yBB6JRqPP;e?fX6t$9>+PAzriB=?ypZ)*g^mfE-s zeVfhNoP`*8KFh!ZSQDO)uDP%!6xDrv2AJrMYY43|+nZmxk5q*8ev8OJcgxN}&Na9( zmmH-cryxxpu1s9kDVB+n6*PLu-(=i8sYryfKs+Z|1qBYK}zjBo0$03qzrPmYQf0*mH| z_5#Dzj~wF&9L$&GKSee|^GxkJoE(I`p=72CLXskGoob$Trz8XgR0R{J8MgF6acug% 
zkm`=k^~84ltnrC+;=SIwKUBJRXNjVSK(|3U0}6}Iw8bsC&5bq0ht+Qa2)s(8Q!hPe zDfM4{W2IkX7FVu3A`^s?8V!O+AN~%3wEALd`O4N*TN#R$R*GmyC#5E8|B~=V@&Vr` zp^^`G4da_uZv~Ds`RrHkxScqegADQcXa6{(=7C~miHyx==m=zVGv51cqf=dAKR=qF zt12f~1=>}b>EldkLoa6C@Wt%^V<;e^xn;=ZvDZP9pUm0i@Rn40W#ujUPILu!M`3E- zGMCaf!1B+RsNjTbrOsQ-*c^0xrbSd9L_WV6XNXafW)M-uo#5$oU1vMi>{<0KC5I>X zrwKD%V`Y)3uuyWX(Ke#FGqKRc%*oh~*DFdv!|N9j6jC%LtLk+J#$UKIH>r0ev3=Ys zm-;=}5D0zcrDQ_}$|idR*~-R&0wcCXOZMXQKmgPjTxJC5vF2RRZ~NvM7ZuS;vKfup zDB0`!QqboVfQ!m&n=#vcSh+veC-B%o0>9_%%*jC$ z=60bsa!_F4+Znx%Gir%fZr*t3@si)|QA@kDem%5m9Ge_?u(XnYUH)I@8hbWGhygB% zr(RCWc1TTufl5@yxg;4D+}y)RVHv1i%>C*U*1IjJGd&}^qq^fe=U$bda8<;|MCqx7 zzwswii`ah&=x66QKKBXdyQ6^h*ac;(P1@hv9=dmGjjcNdGrYxDrk^rovI1g~2^)#^fmbtq9oQQ8)4cKaH7sPRXiUXA zzar_jQl@_&7>>-8HDRQ81_OF30o8qjX%9K-pww^?;Q7y`HwENDLAeY+*K#mEl&H^2 zP6e+%!|HmC2gz^51lVr~I%ky|r?1+Ry?+A@i_D~*`l1=eceht(>e$?!N4XU`oU0h% z3iS-^z&eAuWYv0<(!{fFz!qUt5>(wrduN%6a~hNwM5et zwtdg$!cFEu>Lbb7bJ*RZRFo*}pMx*WI@FvdIQ>S0D>19@bPSbif)-ARv-p(ziJ)W6 z5Q&W*{Zwl*5ksAN7RDaN7OW){&#l&u}W4UTj1G1yPjs}0q#v_8Tne^sTN>;ysJMH35SnpJ7GDrON zZ%N7lB`-|<`e0R5w!>_1$DV=L^%8j(JmHf+{a8a;1FJ=2$z{7+m-YmOnY@3ZY5IwT zqE!osIx16w$eOtB1x{rdEa4gMSY{>neBarSM= zCkWk84m|QpfhHTX*V^h&uxi``y_?mSdoxbg#3ra1adxS}%p-We@&acE--i5bG3$vT zU3{ghPU|@0Q*Ql>+!ebEJn|&ELs|Y(>Ey;N6N)qg3{;K{I-Sa%r3ay(17hDOV2;K} zt{`WwAipZl^8lH0pB=3Xk^e^@W%eSxP)29l&hLp#IZ1gZ;b&Xs+c}#7Ki=%rqTV^L z??RI?1?&0^Wz&Ipq$a?ShUmVa?WQ;}E%Tw*Q>~foo{8m~P$G@h9=a9~ud9*Dbbe8| z-Jf7Mwf=*|iDJ(2w#us;^z}pAK7`JLi!%Vm-`@ktXSu;Jz8A)ZQo_Cm5#cIks4X1# z^F^K<+pt|UL2jS26I>*%72yq(^7*u!43X6uNZ8~`hJ5|)jnOEHd?=tm`6cFQf0yFy zxw|Zg+>l}tKjK(gx16W#k z;F5kPmx2jcSQZyJn(Rsq`=nH?GZ7^ek7Mtx=u>&D>V3i=3b`@Pt5d<4$)50u`l!e8 zBf|~Ps(;&xkJtUB6Tq}RdG0p(R%P#Gv)E_j^Ma4-PzbryMJw9-k%@POc79|Lgy@wtvA=H%)D?Q}vJQvh*#C`ZEKDYGy-j~= zc}c54%l4Yw8H>UP|3Oc2$HjBls3@wkb!6E4A}{T`w>=sukGiJ7N!UR?k73L4(eJ6(H91^$E{Gzbcki)c(pp46t$L%|eFd#f|w8~qW6fqDEt z2=k&b{$kGWl7YPKI1g8b)mXxw@QYko3VHo&jVLCTch#a3+&}M_cGHIn-7j%bvr| zvIk`9=ZG4myVUEs;P}I z%#CrlIzhl{C^c;MfyAR~95%7w^&UiF*RJEriu`9-GFyF=>v2;VEUi?Ebu7Qr!^{Lk z&MBile{_N3)mTdAB4GWZ#me*KmW98WXzdfd3`$0z0q#%K*&ZSLqE$O0ehB^R}> z_M=!@D2)22c#$a3Jdp7pq=?iT*f4+ECCfkom(7buy{xd0`N7}OJ`olA>w2GIaR(@* zRDLA^_qFR8KxLHa#_Y>mc0OL$T@en;hhlhHF}@Pp`%mC0C86XP;3r7*nMx%AcdYwi zA{Gg<2aFN&f}lckJ#kc)7#*~OCUx)gL4VsHV>&!G&O6MBF?f?qz?2{`^E*U)D(S?q zX$bQu^QbD0bLX~kafpi+4*QzxspZ#keEaTk1BRG!M7Dt4wZKgzU;k8+@*8ZqFqpB` zJ!%)B_GCoxEtjbI91g+Q=3wS698cSopPlYnNU=%_7N6_r?VR;FplKbGG}AgcfSs%< z8B|Gt*-+W@AxAVe=`d@Z)S!5a@ZI%fMp&T@jz?<^BVeMIglU;y$pOs@&qaFH znH+Tj=kj5E9%E(y={ZQYcvK z>ewwC2VON-Sym+jMOC0HqR;WSR!3~-2(yV4y1cZv;V-rpD$E6QFBe~ilpWOf{Cz>L zzl3g%y_^|9q<8Loty=w4j`Q>QNXk~2D9`=VI>iaRsVXMCP~47tYRqJi4gDn#;5KM* zo9=+{yc=H=urfvTsyg+`-@i8(VU#$jiTs8Wj7VvmbFpCZxP7VgGD9I+;;gX2R z*D|nRJG1Lpo1Mheqp9C_cV;Cm*iV5M4!`$hDqd6Jp{13Td1k*)&_-f5f)9pDHG5Pn zz<`)O{vWh&?|jZMw!c=qYlnF25u`hBgsmLBb1RJMrKjJZQ1OCizpXc{D$c)Aj zm$a}0I=cnhKw?$+>XXlhQqES;Q_a7MH_ej|(p^dHTk?n3k4e_=UF0`@4Ukg-O(KGvIzFktQ&b&kd@M1L>+=zwzBzGx8MK#_0@4V_ zug>f{!@1V+$F%k-iz3thtcn00J3+)`TvL?2`#rb+n*~|kwJURn6a%s$CzQX-H$0&J zEEjf56&^(%Se;3NH0astCx+5r-f4QDBOfh*w8ar zDU=!Cp#PQh3q)H)#2q6UmoI)auw6(h&N&G^}X0fkp-Sx7i)I!gIr1|2_$u1%x zo=tY2sWi>b@P#sSB32)`KTgjLlXBkj7ANVY84|fL zje{0vqMLUvt3mW!-i{w8T-Za~fPC`q;(W2(PHl!JYDIv?a?0Cjv)WfWz;SMW#NW;9 zTG@FBx6!y3`ya%4#>P%TJSylm$!hFT70_BnKmB|dD`W47(BI21i2Y{Fu%x7LvndQG zP&>PD^XZINCkraGox!*IP<1rYN_s$4jCpAsUYz~Ny7#d<3iyZFiob~Ny>GdQw(CH*Y#2r1U( zm2--4nH|x+Lsjtl8ckzgYY1q-hH1kk+S5flwH;D@~pG`4wPvOgWwIiqQ8-+N(tGHC^ zI7{TupH6(KVyOfL3VN_c1bL9#!22#_SJJPa;ms0PgPFX_0PAB#y6$R?0gWy5H71F` zxf6-rE5EyAI=ayNz7NRFFhwCk2~v^h15+WwFZvKTE5GZM8#*R?}| 
zB$s!EszyYk!>j#hYlhti(PGkfD6plUh^MC|ce3M*h0OXuVRMG|S7ua5Ur5JeNvDq^ zCwD6pl>hp}j$55ZZHSFJ-MFx#xEPo#Dzo%^oKYxBIb8J^1;ceH3d-2rUUy#+RMYo3 z?BZq~ps~I+)@@vx_pYVaq8RK{XvvI1VW6_j1+u$bZ-KqU(vg*UXn#MG4{=rZHrLA{ zFey9>U9H=c#uA4T@h)jf1V?Q$DWKvoayft9e%L(Ie*~?uUi*${<69 zqK|0S@RF4LOT~&zpG#PkKfp^!bT?Nn1!{dXXY@ioR2t?Z+11xjlad%JOwgJ94~n}+ zOGDo*;D!9qwY`8|F%*f`7-M^e#RjJ69K9<|^ycXl9ZoY?nFEVrUoqBmVs1>$-Kny> zDr7_>e>8XvqIVf4HU+#e8%vJ*85%RQVa;cROkY7x$rAaUHkjUJtxM~F@*nxYXII(6 zXZhmnVEk2q8mWWkGOSwrdzDw$0 zCjzzJP=F!udwToUh`h(5p?Lqa8N(j<47Q_v-p=$R<0%u>Oh#C%_!d_*h(u5}E>5s0 zr)F4*mP(x%8Nm2a)6Jn2e?c?d{+9Fe2?4rI7n0J<-QgfWQ@a< zp5?@=0VX0c&&_+Y?;w=SsJ5-t#g(-*j-Y4K#Y*Vm*rwACCX9*Z8fb6*Xg*LA?0Zte zg36QeF{b!aBxJH>2+l#2Q)1e5pVq&u1k4GR77~tz47AA{SdS-75kGAgEGE#rwFYk(;9Ln|c}ui6txXW@%f*CNL-G@e>tj}>5aU@>CcWlw^?M_t@-hp} z)YO%78biB0m3(JSsG_PMYW;6<4tkuE$YgpYb2wPvl6jq)4-tO!Nh9Ycn_`D6 zBT^&JJcaaj#5ppo4C2T5OqTG3d1a83Hp?OkmEBl*Ps5eg?IdVQ5hcUBD`CA#mM0p{ z`cS+HNXtZ^;&o8^5R~-o9;t)HIkSE=9bg_bifk)OZ@!#*p~C>WI3cXS|7xW)ZSDqg zw38r|*1;1@P#1>u5%6fF((L0k>9AdqeL>Tm*Plq;tdM}^kGsuoFv|3i?TCnz%;z^T zL>_Os>A>wtVYx0&yQN)F{>N7#Q*cE+oOHg& zNL>j8GP)heOYFxcr5@dpxxM*qpbzxklN`M2i%=^Cs#`9{;w2DZt^zcvd$WppGCEu? z;Vqt@`EP1Xd5%Q1gpjEoH$J_ z&|AJ?UBq5+jC64aRlL0&V0od2Vrztrh`W8z(_7jFo zJ8WXQR6;M2pJz?7@zJmWvO(p1r(XPh#*QEW-}0dWr{cK~BjY=5=AlXhWS^^K{WtPJrNifk&|6U38AY+{sA6G*??&Z70< zu;AH7^TsVgIQ#$`1?S(Oaugak#6?W*+D@g-HEWc z5Irsr9Lm$!^Ju0oFK50h_1!lVgyH2hmDU3!9Fl~_=pYl{TE;%>xf5B_(`AN^`!r;L zOMN7XfmkK2^>6>Ox2=F`S2&F|OnivOT~jqnM^1`e3$Yp9Up4WweJ&tAtJE zXJeFmc!T;=$uj_wmp@aK>o&K(Mo;~5i?sXR~&w523x56>8H+gyMC7I`FgzB zGsfe8Ov`tzy56ch_1k+-Tuf31w+qRCzaOZ$1s3J zbaPqu>3y+Txr^Tn;Wu5!@s2VNyZ>1)xG{-!3xV{}*EtjBtsWz7{Ps(B-aY^~ z*CkU*2(Nz%J6CioT~kGR`-j{-4;lODTAN#J=jQdQemLV&8>4TPmE8YrmMGs^!Xqw6 zw6c~RS|zj6bPs6A>MN`5n1mLI2(=a6tE9Nk9hgY4ben5du=Bz$J`!XJmD+?3iNMd* z_BpAjt=Zyqi#%m2b&99&=QP9T=`ix(0&D}voE8~4S1hwt_z3zc_pbZal?H2~qEP7n z_=n?i+r_=N-gDyjzgW-L3FYyaO}B{x_p?S`@Yil`X28x)ar#q3tkDHzhkyQpzu9ABMfZtm-9f_TFmKUTcOi-WDjuaE-Hbfu zzn@{XX@gxl2I~ULkI8=@d@2c5u1xINi$H9&E zGi*DUXIE-%)EIwq5dVGnNuNL4%Bh)NWY8xf|HA% z>n^kFZH7<$Dz|jU8F;%h4pW6zlAC;bE@2*$JNNxrCWHIx zqam-BHKI(lihPy{Gwh!tr@Y}?Yc0%!j>D!1a03t(Ps-FWTQ8@Xq3;&m187W-*l(Ur zeqo>D3jbB6JDQ2iGJ9Fn9mD$3_N0jNs(OuGvc3kX{hIB+W9<7d_)R7a;das?jb)XJ zdhkZzj7x<~faHB7^7$`o__=?Trt>Pz25b^~3%Yvzz6euM7lEMj;kbZ{x|x`I?tnImR@ z2=vuu$kCJA=lVTn3()m9E;cb~x^ZT~`0#-xz8_ynxV>1}TrQ39yr-a(kKOeG{d19= zwX^1U1?Lwn8y&r<4Q^+za{EEFDP)xv=^Fpx@=NOaAY9FGkcN*G{G=N^pO%`1JY!`aKRQy+5Md zb+k4^l$ynKI~QZbayP(cFe*PR_^G@wonfVDE#vcv>t}7vn_M7wCYH+~ec)Z|zq@de zXd7491t`}hielEp)x0JqHMl@MVW zm^7n%o>TDR-&ROoFlu0??ynh=7|lfF72f{BfpIX?s0$jC2$5T8Gjy^Tnq(u*(3m%G zMP4D(>;t?zq{mrD!}My%7A9f!L3-&iP|Z1f?b&B2SqAU$3LoF78I1&H4C9g@R27`S z)@*db$D>IvN&g3aQUHD;35kf1fy?_vMCM=Y?@Q9B+VyvT%B8O(f#0|GZ@HUNTFK7C z;xh)e{oMF(AzzCdXQ6UzQ@BvD6J~`yYceSQrF;@q#4u-86?g1B1F1Oh{3JCj)8?Fz zDY-Ty(vKZLA+VL{5B;=exnkHS%8bm>ysB?N$?mM|+D<9in=5Mtb2H%acyF#LVc>(Q9BPmWFvDfYO_N3u-@+HLc*wf z`g=bFI@;NfwWvV_!F{Z8;E{=3D`0I6O64B|@5_)6Rzsr*)6@Gu*f_c`q=Q3iz)=33(`s^fuf#qu^;*O+8@RNR6GF zm;D~13Mlsc1HHFiGBH9NJDJT1s_kQ%V=iIB?XQ`1{V z_>3r7uwTAtwT zZP#Qk@yi6pkZ8}F4}Y<1Cyu|}c>xJYMA8_~$Woe;#v~GL`khia|CU2OjF9e6wJFaG zZahXYY>p2m?~!aUxGu0?Ixn1OX%z?dzZ%*QZhD$tP@kgI%s{75Qwv9lypjS?IXdIa zBQr(1{msP8QGnZ0^Yk8_Xx2J8{rERkc$~F8w7~?nOce7|7~(Ziu~OTU;>voQ?9q;10`Aeg z8DB_j_J+)eu+5={q-NvH2Q2y^+eo5g@3spDQcpg(O99-niiMmGAcB zu7=!OEdO(mkXMHs|4 z0Kz(3IvmAVOiu*p8owb(K6F25;enS>crbVzl;0@4yadHI%DYxGsYS?eb6_u;h+e1?RQAmYOwbpEEk^HVqs2Mhuwkttx+He%6k(hc88@igb@tYyVx zio*dum)`6QZ$xfF*K8JZ@F%`2RsLO=S%36#T@CsIHQ}+ytv~Ze7utS0Si3T0Qt~0^ z60aNuB~AmciF^rPMmBC{0E`8zs3E4zP+xW?WGrh)nAFnHMh6fo7lhY`(xs2q&y?cs 
z@tW338V7?oGAzR=G)VoM8Fz9A=FR1pH^O82@xgef6pC!unTTs(;MOeN$c(92pA`KD?RB$}A}C$eE0YDeA|6M^sIgBt zUJHuEdCkF1Oz8AM36Ff^yHVvw3*~SVif3~&B14^;34EC=C*j@EiYq&Ou!xYdFE7G~ z$1Ce@Q-i*3jaxGuF6>6=9xK>Om+t4}G=>b9&K*Z7YmH4j!g5Ju2lko}04?0;TDsZ^ z-JkrPyKw8bbQuAF%+^$q51+@0x=_TUK7_6?Y>ru2Bc84~I@q0WeUq&fguTuRID`d< z01M6U6K+w9M9tYUkL7r#W7imV-xCp6cKSc#ol>${O)pT_@`L^j_p~j{9DS6r!7&d% zd=pVr_a|ItcCflFbp)Ic22xwkiBB-L$zmd`~VxE`iJ1ogKs)#g#`HJWN+LiZe_1Xe-a z_VG2j%(`bzpkn>Av13y*mKfF$9rzmnASEj2cGNd+04zW>&XTMV(1nbvL1*d|b$kEfTI^;?yg>Q{Q99a7!@_jY%)BtBw8Bh68JE&pGmBgxNwe@lj@|%M_^CWWG&lGD&wuqRQhnGB<;nv7hz&KUDd1EAiKX*J ztpKQ}{;It|GwQaf^7mvEnF^B$1(?1w)tca}TWE@kr16{hO#KJG|5` z#fy62i`Qe)q%r%n?R?jt_4LF2bGG@Ix>o3?W0^$%^g>A)1Qc@ai{Zuu=)QT}xFe#N zmB9z_Y2fc^hRn=IOtg0oZa3?|fTvT<*r|y8{p9-$r$^k+?81gXgpQwexX6s*`e@XM z;+E`qqq^eFLmD%?p|ex+#gY|~`lASY4>pb`M+<3?tM}Oe4Y*xM)K_`!fP$&NC6#ws z`f{`@#~J>ewfs7eV~f<<+Bz4NaP=ogKMB*e{hlavYV-S30bHjl#1(t}-}N4`jO9TG zHf-4vSaAE)Y3qhGGiI%0U(0gP&fS?MZ0({+z$*5l`pSS@LKG%(;_+0T&=B$YAQKJ% z5TF;HKk#=l?qq(yVb}lIsq9`P6QdiPph;tBG*gczcze_px=2}_Jo$rbbOgKm%bHR1 zSy{I8MV=phSCuxtIBcBsq2!f_YdTVEz}3U?;F|(F`A_4Qoy~=W^SsKc0T5&-wd@bq zv*eK~(zR^1kCo*}wfCoel%9!wLnal~^>5*h@AZ&oua=~$p0!R}sQXy3j~20Spa;Ok zOYRja{aqU~z#Ts&L7FmJXH-_p9YP1KB<5#s$OvlE5m#4++zDnQkdK?ik4wYp0y4u^ zmc)aqx8;tEC+?xIX^k2~SdL6xZ+GR-I`&66ab7rVCG%fg7R<`|QN}y5`+2_7=;gZl z_5+8VStWh%n|!3(KtnZ`mH2;Hf*n>pRuVC55Mkv7v}kNNJwUS=?9t)yq(_!Cuut z;2kLi&tm>eS}NEV%dyZjlSth$u=R%}FVictS-Vi8J%us( z(q_`W&1Xeyc~Rx`Wg@0&r*!CXe-h~&HgvB?*W`1zL<<-zp*+`vp!J|@7M$-8b59si zr-DktfgT|*1iwH(6GaNOW{C==j56yNCP8|(mP8*I0uyQ`el4*63O98{4aSYB2r3at z(!C15kqXE_CJ$@@n4YCVa)&HF}KIQ4x+W7c`#zN(o>C8rKCcY;#ow_abP)~b}$XCXu*inCrMYC^e zDy4lZEPm|TDv&526T%8`IwUKm0_bMDJ9|hAjVWpNoV%Ye>AZ@>K8g<}YkZ3`%{V5^ zL+$_2!A_@K6BpDET{I zZoiTrnPU)C!cI~F2yyo;cU0(CZ9AW=GM24;$SUsv5*8_;6ON(P&ONE0 zo}YvMTS_35gpM^pdMSI?mXF$&3t7ZwPqpEfiGtgz-M`x9KDxzB+Oa4f^9Cv}K#(U` zWBmutD6-OTe)azNDoY}Y9@YzHl?VQA!O$~p!3=)he&+TuaIsh;;o*O}D^Mc}nh@0s zWSz8^+68K?o}*dIiAA2;En~oHb1|<5Pbam7?LR4zIj21D2oet)Db-A;FWg>V>os=H zH;_nqTXB^8^MJhno5y;d*sQdyS;dl~nTg8347E z{U{K~ZU5%4T~45js^FhKhW~Ybs3@HbX{|xQOXA+VFFLiRsYGt)CHTXU4J049TierJ z=t-#yktf6{w#Nk0rQig1WphmIvm}a>y?XVMF1<)C(GT1tz);Aoam2CfO||{MI^20o ze|SFR+FZ0J6Rj*rxil7RT3U6fn+D1O(O^((z^;tZ^#+mS2>ij~;|e5e`Qd)gd6lg; z5~BnI7?Rs|Se{FGrB&@ne=_N0ebLk+`tK!l<@umEp9We=SSjcD@Ca4IuH4ooo3_#X>ulra~%T$M%R> zErQpaZrI6|YZQ8u8@r$8^79Uf3nppDL3<$B73FGdAqt99N<*1^?441|ywy`VE8|xf?6doGhZ@H!^X2XU~Ufy^OEl5|Cl<8ZK%|*`e96gopu@oXot8y>ft`+(Ko4|DGiqZ=^O_o+ZrdN5Gbi|` z-K)?5$K9pcD0mi$d=^#pqOAKiS*4|24d!@C!q$9d(OVOq`ywd&v8LAleDb^|5lCEn z;^b_JO^YUmTMu}@{Rl`Z{nP!i$v6t7=|SQy%ENOL)}W&tT2yMvOc*P6)7h zKjRg^my_KL#GS`OuW?gwR( zH$l+d?9fQkRoz&S(vl)_o19?Q>RTN6W|^UpgO>9y{6u5RkK&$_un4Q+8TgW4{Io;j zr9m>kma`E$0`Uq`Q+2iCsCy9GcSkSzs-bS~3i&VH#MU2PLiE&?ckH!70mE0EQ^Es7tlkr`Bp*kyjt*Ut)wc`xJzhyH#>E0; zIL5)rPBnI$>WAi;&8E+;K*E1CJ^-Sx@Adik%N3K|=Zyl3!xzw9e(25~U@{$JlH zFgGsBT-6EDCbMYLQxmP;S>@fR_wqLm8{>mPcd5bl7l#rC=hqwM)YDe(nK;L<7h1VM zZSfo(=UYDVlC7Rfb{?M+jz1!D4H7YVjXg>I1P#A4rB(t8+sRskyOc$u*h_bD8>vZP z!JjB?ND;&M*z_6UIL$PMB#zP_a2g^OUfxQ(K-t(xq{!}GW}N1jGltY<#4`Nnb!xq+ zD^K;Y{3F!C3!%5kb_YyEfglA(!NB)lP`p@qhIM#haC$-=x77zAeqgM!@x3{ zPWQPl0e_s2uwf8EbV$qL(i=nB%sZ}c)Ui7+lh@1q;Q^P2FXzfgH(ohSXitd9CE>q2 z5aknB%_oraY2z_=Kff!A7%MchTB?FKlqLe+Z%ILQ3226UU|UEX$JwATp8;<$8t_J%Hjvl)V zmc0t8e3N#B!d(q1)zn=akCEIZeEGPVpY+vJKt@x`PZ*P65vJ{m9X(BN)B6nNbSnG( zyhwZb?B{`02@gX{jazy)KOTH3pFKXDS|dM;RA)_m|70iVyO^J%hUQUg%b8C&?wR?U z&sMt>r=0G~THr^(~B*aRs@yfR99bFwz%L6YduvHlC zQ`lJ3xpm!7^pj2Ei&=y8{kElBU?e~mB=E3Pl2KiLH9yOrO$pRc`?t#f90`j99vP(D5+uJFVom8%7`6oeCY1EsG3 
z|K;`2datYMVUMPuq1*6Vk|B6&8dps2+>1k-1f0oi=oom+^jb}IirxEj|czIAZaiay=w(?!7)PSOYYPaVceE=o(9&B zsO{GP@s@pIIio^FpFStB<0z3Z(~bgmvJ-6U|3SyloUiNu&ee3&SI}UI$IqmiN*P}% z2%v0UI@D{{1@wxHJN>>LeR^r^zuZ>J0dU#@sVPRJyW7P_c4a32&pYSY3P&;;fMF*p zaQ$06j+J(2j>@g2-ym@|CsWSp88a4quDRF;Q0Ufdj%~HcWG~VZ1Pgg#YxuCgZ!JBY zf&znyjdkJX1>vVJV; zHG-C#hgD03y&0}@sB^bK2CgA-)+Sd``^J>WxpkeIFe+e}3O>(j7tMtwm;&0=&h3;W z*Eo}C8VmRGM!d98jcG-N!%`nxpk~L)mwYIBcKKY6Yk&!p<;3(dwf%6|lwl7Fr?IJ< zPq@hyuo9w|Gu|e+1&__2B+9@$<Aetv`ZiUrylngV2&GR;RetWfrK! z*pZWsncUo)MD}3lc$4@2Lul(yE-FN-pJeSvAah8}sbfT$lHY!j?op&}v2taqfGd zpau&#!8jMU>f8VGXLXQq@RQ~YN$c`ET408K6_W6E#%3<3Yq?zaa=?`0CNw{~=obCC zQU>nrm~_{(RL85q*_VXv@RhDXz=;xz>~84Lxs)n(uK~_Dn|?S^IJW9%f79k6w+N%t zd+gOa31z5bjE*L8e)IL>h3fJtP5bs>UC=e6Iw@8v(j z2@0>1F{x)2Rq&tf_tVBR1yA8Z3RHmft3W+QMG-~%ZO!Gs#raFO9V{=S=X3>>D>UzL zSnsRz4DNh@3xffU9s|HanYGGYFrt3AbNBWR{^}7=3gbN~?L!>r_&L*^$FhP$r)*yL z)mSBnji{t9z<~=UG4kUm+k?W07GIgx6hJKI2Zg)YXL@?gwFo^5~U;WGe(Z&dpSjH z3G)u=&~esAU_{;4<(uRy44*SE=12Jh7vSw*2i3VouKmf_TM^X?#reDhp@CmTPuR%(x$~QCF9trL|{Fdym2|A@W7LyzK3txr})I0qNRD6#?)V_H8H*Un0)w0 zbr1)V$AwHi2<=Rjs164 zk72Z#L7u*AlQ8*}{CA`M#iDm6Y&u^?y?JW1zL5y!Oqh>HpuQI| zhcDUCFZpxF+~ct{tBSr$w0(-s{b{$YfTc9)ePS!u!?Ng0!f@<#*!mKDB(t zu?DXzCQ@|y8^|DF5kTpdYmB=Ku>iASM zUW_z?*RIUmREt`=0yyo(bj0&If zK<0yr-qzV!d*vOPO+&8eBc3tWgSXuv8%{ucFVC6rg-UD0Z|e_5_x0cBnC)_lY~c9I z3NGdIL-BO)BqqCzg_CVb^bu2t&>$_v>JqjbVf|hvAbu8j08#6ec+G@pT9h|7Z$Kb= z6j~>XyXeVkp7wj9WR~jMFUo8cn*{DLA%Wg)hcKZ5ZRz_5{vow+(E@-?f48*|fD35K z!xAGnzeCgR_ET+=^C-S)e+zq=;Ee?$JaE3vg^z;?QCXtwBj1lj&H+8s?NrG{EJ_$;#R9Lc ze4p+iD7KIb6qYV}{zk4dSKQyvj6cBJonZz$GGe8^N|#b*urV`4f@$&xyrQ{ozfk4N zKf)V893-387}T$oKnjKn0V%!1Wob%u1{wkQ*_EHI#eOw6a1NMStMz|Ay0=DSGxwXB0Ir3s*xk>oJP^O zw7ry`v<9+(Z=1J?rmP&2lzkp~)W_JssSKtv5mkX^E1e|N@|j3y_MkKLz+aLzkgCE& z*FvYqrbVT}%mG6HZ5jB(?^++>7%!|PUoi+wjj?ZuBrTCuOPU^1in@Hbqhua8`uEpW zE}je2;HoTQvaylmApF#petwcch^GPkMSQTcqe^kxT{iXRh}g@oSw#h7U?4@=FqG9z zS!^?=g!4g}^o=ZzA!T=i?j#Vt&6H<3J??>()C5N%!`8n7XD9KP6Eu70doaH1w0X6p zs7+r|!g=1yLn3*MM1}>Y-G@g`H`;z%q8M4{8UHQ;$l`dXW2)|wnIa;{O6#VW=9{a< zg1`|x-Fe^Dw^MLiFlft2mB-*r@@o|m=CB#W4*9oTPsz~!D-*Wjte@Dg65R(fA)h<} z>`lYgo$nRHmgokhyM*OjD0fTe=8XuRf(7fXlSUxC)cl-w1W+Jqy2e)m#y1<+vkRq|Z!2Z&PcqjP0P;2P0IxO!60KG66 zWX|S!jDCpPbRm-DQ%VYSfw|PVu>poWjVI6{U4N>UBn`m7yPcjZ&A#eR0QXHB(<`J8e*2CJay zOb-`B&l=7&J{X=Df+ZFK-T=jqcV^qE{KkTav3>Cg=1s(VYU3Hmnqzv+Nw_SdaJ%xD z%$kN~{Jug)v7dUx=4X39f&a(%N{l9z-2Zo_aGZ=bG*`tRt=`sYY~ZFoC7L-FdP8~J z+s*4(PQe$UQ3l3iJ}ow*!{NZ2%$Bbx>IaYAGE3A6L8MkGgWgIq=wT7@uAiwbNiyfS z7P3L-LLCi^aXgy|!qHd&hOjafHhs8%H|QMrtJyyE1v5@U%ZpmH=Z`k z{_1nG zp&%R-BU|$i$L8i!E*puVCm5cDN@nplcnG)??+O1WsfssBp~RF}%bdO`ibQsU*d&j5 zn?6ZQp?A`}kDUs?NSH@q4RqBBr5V{>xd4}+_4!l9aIOWA(S0B3c!^mTXq%c*SHeC! z7u9;H19o>S`9-EhK&p}Guc{9-HLb}3a*i*XAo*b(r(U_d6po5rnX@+z)Q@M58s^Aw z!FoR_i2XwgX}^3SafZ?F5T|wbZ4IQep!oLw%x6L5;CDeQtqg*}q-E<;A6o0&e`h7r z{-GD$;q>v)ib55Q4Z&NJ8ANDsiCSZQ9%zjC8Wt;8|mau2Iv6h_-cyHM@2l2!U zDp(y7q>%tcRZA2(zi2P9%Fuo-{mS*^+||n`%UooxIt7hr5aCZCbM2GAC~Ov$ zHx5P4MSlTNJ2@sZa{dGPBmf@xF+LpYxI!ENwrncxuofbx80)G2y(->~O1b&#m1}_~tp6F4XA$LPP(gSZ z?OpM~I@ae;2^-XOQ{j(?ob}Ev|A;f?j4^JH{jn6qzBd3S+2yhd1szUAU4dF80Vw(LS;BFab)uA}hsLyWo)rtD6DP@WAGAIV$FB zJwI%O1qlSSKWe&1%N@;|>LJFowCU#$fA$jt1MLC$h`p%Wr4s2w3fGRf&CgyGM-Q3E zrvtXSPSBzeZ{|VDg~7Pxx?j)l$tgY(@{ev7o<o?axX^swMH3IhN&3md#!#(uZd? 
zXP}b(1SpqfTHTND1&l3B8$dR;CdC7@I53^ix1!ssQ2V8gJF?7@@d)vn;i0w0RA=!d zQ^@0S>kR%wWtTtn+)n0-URC>_j(xBat|{(ju)%M)!ONm#Re~P>a6$wg0|AOB!&>eO zOoqU^a_!b9WJ$c@lqTfX?w?kQeYfS>mH4yLp0 zH^+ZhTvu)MZS~b@a<|yHe%Uk3Fj*!dlKvklf~V))lcMJY#I94HWSx*qQ~EVsBbTgU zOC#q9AR_2!iJoynWxtq4BB0Q!SFRH$PjvV=T|Q()lXYHKz`fURUBDtRWWyUfHVY4P zmGP4vEcX=Bp-cnB6Q7Vj57ykaPh5WOTmAGg+eDy$)TkAX8;)rZ+AeZVN*^ziuX>B+ z$cqO?*GD)x7fNKFk^Dd75{(Y2u0%xOq76{(tA+Eb_ji*Q{z@^N zy3zRR$$WB|3`U|S@~yP?fBj#hrg*-R+frMnV1bt<6m)9cBIysfkM0b4ezvCur6E&- z^xkHFFg#00i@}XZ!S1JqmtD^ZY1X~fo~*lfDUeo)v{ScRINd4?qP0hKvqUS<0jkI9 zGsegi2$2ij-pMZX8kuICBH-P5Nn%MsV21hU;rq78)}u87@bWU*FRiOjrl;zFZP-6! z@KX{^DbMz(g;^=z;S+pY4^9nmkWnK41c18}kwhFsQffzxzIfH3mhyLn|2E)wvo9G< z>~R*Y7u|6QijGcfeGh3l*4bw~geeOYGy$T`09u!q{Ql!*eQkFGV90A5uCnDmO=_+h@n(9 zx^_b@{_4vIF3Z7)8i=L0Vxigre_HU@uN{4Q_y91!@qm>G`+b;yn9k&43HPjKU?A|2 zrW8J@>)$lLC<&dM`7iUD1heM2Fs%I*tetRlFD)?k)e9W<_qU5V!DIL&-+ANX(HO&m z?_7zX@@@2Zo~YEI#;?v5GKTFLhJ!gU9C5M6l!v9AEJ_`jgKi83d{q50M6SBHYz;%J zO-khqASGYhro;uqSmu;)Ec?-FUmnf%ev6oxXL`iAD%=PY@cWEcDG1xLt6o#`(8hrV zm&7>8ioisvm!#n9q~YjxcQWdYvOmjazKAhJgLNwY;<75teG3+QvgNn6#5cz}1mK~{ zGPHe)rTA+S--dC70uHSk%Md2+6b*-<+9`-A)h_6|ft5_8V{x$Tj}2;JL=N$m>CPel znlAmY2%hO7!Lm_qZ11Ys%A_6j1!lD1?@%|BKzIU|<*4NdG{1r49ppL7rW5Q#HM3#L zCUIzn0Lk}<^|Py7?c4Xd^8-6%YSl-@$*OW$Y6O;x=ST~-!GpA4Y$k8>4Ihq-oW-ahXv^T>&s_6hKe5=qLZkVi0yDy&Da z{{RKp%QeRs=A4{V+Pw6t3XHBvudLfG2w#2ENn}(o`qZ(D8sJc|=$~h34y@L29b+nE z4LVaY^Z>Eu?BzT8xDqb7X0DSDf@n>n6UvXZqfF1!dt%7~<;IY~N{COY+Ro}!_BW&= zFxy-(ZCNv6=-k_wE^c@zt0FmQ1IP+@8zCg)M0lBR+pjIF^KG~2`h~`jeNpejQgY{r zeP~5uduhKrJSnuYh~2KrwZ)Q}dE&4ZfvOs#h{~u7I{#FK#sUP%-_crU@QN>RSU z*$6XmO}+W4xl+=r`&<$K%WxS&o!Jw+qTgeTzO}{&OtgcGwC7F+bp;-os)#2g2a_;4 z5F~!kIbWd9nISYXg+$5?b0WMU?-5kWxh30G(s+4XR@!@o5T+59FP^Yljw(_YZ%6+# zm1iq2Sb>Mbqj7wx%9*Q%F@YMtKWJvKMJN4<2*`!JHN)+f%&-gKGcytQJS}JLZ|DT_ zS|3esj%@cOx3?G8TuJQRC*%dlU1Am*eGLil?PU3D+U;+R=T?_0E}Vi^iHtPoBdwnz zSE72JdvLXofyU!CuiMOJX+#sDUSWWh@gzBDiG1?C&DG^^h%KAPe2?-e`8)GCPKQbT zL2k_il435c!%=lZEt!=}uXfLe+znZy zMF4A`a`!m$fJMGQ=BoODcKf@Ff9kWm_b1sP*J}4%kh@B9QPh^X^J~V!|(|7=UJE@=}TOuj>&v@utt~U#Lt-J}qKEujBLDB#wkLWVeqkNqc<*zt2EI z`hQB+^zUZm1-?8#)}Crw2d`J}3;5~@rP^@nQxssrWzQQ+1|hOcLCJvur2UgGIK?jn z+7U85e?G@9Ni~IgKvM9)@cJg`EM8*X5k%VaA_{++s?i7sKU2vi94UrQxe&(TjNxM~ z=>gI^rH<@av~h`&priqCW```{zO)zjTLz7nBysc5-7l)3xWK3sb8GnH(ps+d&RPE z^75?mWJs^lL85yk?z4&j_-P1R;D*D)}v>IjrQ~b{tVobnc?AGNo>Ab!W zq0TPlELg!obOTt5nt;f-G-R?2lGOTpy1IDep{@$UR~Q(N5TQEh z69OFZNPbHES+8Y;PxgFcJ|z}+>eFBG-_tXHKglA5RBX0~u8vkX9mQ4t9jE;LQ24C~ zVNR8M+{(x3N7t0`bdm=c#NVYp0iT*6!<#=_0l`clAoEj_(eJ4{F&{=aiaE@b$zXpq z|G3Wgf6yC~%IjAugKpzCl3wZt-&i(oz~slwJ}#bymo~wG2zin_1h6m3qfR`M)uWcpK_aY)y#OB zql|d^4$}ES$hAWDkQJl$ZF=1u!~UJAo=B>WrQ%M+u;00pPIU}NN#ucD%?hI9!A(D? zWpO6~${V(c|6v_17;qoQb>33!RS%mYuYT8eAnw^cC>_R2FpAN*5VHm&ohn zvt*`L#S)Us|DbA}xGC*RmxF?T2Fp>q!|8h53nu5`BO4#+KD(wANFX?!82Z zye&Y#+CTr|$ytG*in)?s>kV(lp05qbw|W>NeFW3~A;ODVTOKkR%0x~F3bbek86WT! 
zNUi8%ER{GrHyFrbEw?9b*>RdBBkZRh{uW-zxadFL-$faI3ipV`aX+hh#p=L4Xuh=j-14im zzaQ*@*TMYV3(gz!0QV<&Q@eDY_B3)YXYxCT#`IN6sN9Ym?+bG#J<2&gm@h@4MqpX% z@t>=?PbYR9f>r-6O(n?H?tnKnH2xJE&SvYMDX*ZPRtn?u5_Zrp#xqdTgj#b^tKF-@ z-SS`*{b;f-`ptq8FTp0~pCXnUF8;&JI(u8@{fB>Qjkvbv`C#__TXOq!q~JbH#ncX8 z@A=qyFd!Po4x3B9)mS-u(5%i(&3Z3TdR-aUK?1}9UP8fTyo3DKEQx?ch!YpVA3_o2mTf+7kAiM4pC|3~5c~ z?!g$NRagCTTt+&5wSAqACauYkRQDBmipu#Zit-C?ZPYTqn47q+2eOiI0kTHtq0{L!0bfF25D@S8(Xy!vWw#e5d zjSs@Mxgs8ijI&7kuYwmnP*%e~px`N9M!7#PobeEN4TXbdoO$v&)48*c%s%e{DN{^Z ziyY@tcYa7dclc)i#c49X5l2^i*z)SK6>=!09oMImQ^X3faN>1^MkD+MPmBKOgoq5i zX6pkY7ee@z78|3xHv9B*A=hvt1}4$(S`*V&F~)RBBp-xd%ie|GGBY$*;692y(FMhV zHe1i=3k)F%-7XiSYuYylBtTy73klELYO$StTb3&P7LCkb)IdbQM@_C_JBnY|CNals zS>Hnv$xP;JGg)0-{WEno+hv9$CokMos$z1v}FBtjy;T{fQi(;w`h+Qg6EHE-ttO^I|U##M*N(iqX=aqHV7TX!` zf;lDT;e2|*LdL^M$rmCwUPGt0KR?nm zLkyP$8rZiErxsT*E5E%{RD_VBt}g$k&Xr2h&RgHh{<{w5RPWnw$jq+(_|6Z*bkAGg zLiUgjz~;Z#`;h=dhszc;rd?i3rlPSZ6i~a`rrH(+bMs_&iSAp~H2Tfn%Ld9{45zKr zGJ`ISOaJmscGT6QJ`+$j?mmFHnay;C!pa~_M)ogtkjRCS3lzwhD~41~&gDNwXQidu z|3^y4hn(+`6{SDl$W0~u4|+(bKWAC~5{TpUh|Ez>?Vbu3eE(-QEF@~H$>3to}-Yk${r3*Fiw-%Rd$v7^}M0& zrj*a}^(Bq9^bnzeBG?(pelj%B0|R81uws0WF!M@ia_1PswNXK)lY1&^(0c>zwp4sL zuOhC~50EITBSq8XSpJ+)tw?4)L%JAMmrrEil94<-OME$|s3lT!S^D5}w?ZTh1^ome z?YAt|=3ISz%?ir$NN2f!bc(J5Kj?Uc-HjB;cgiB>v{#Oq<9%oheg<#E=#O(HQ#3OF z8aQ6!iCv2+S|;}a!s7{h2*zb*c4yM@iU4^~7jqD00Exx1I;1TBKu>4RUJo5%!S=90 z)^-M%{;J1+)Ys#0^PG5G5CMjRdpeg`@)htw6qnr9Hx3&_O+TOk>G=5><&ea+FZ{ji zj#K!Z0ik+DE;^2f@XYA6=h0~-TUe#qE$b14)&HRBB6ek3jH_j~e)7m8x)ix+F~y%1 zI5I@522T>Uk%ce`_wjd~1C=(-5930toFHiAZaPF|naL@>h?bT^ z)L_q?L%(-ooc}M}jo&&`MBZ9`V4&>EpQNAlYF`R>>$Fcm4lyc%0crUS-?tJfE2JK3 z;{I_V2-sDEjoGM-uCH`oen029M$!-WTe)g<>HruW$jZ(p7os60=G35iDdvS0V)48x zq`W$azo5*M>ka=Y&PpT3L`lJTEwrl(bXUv!qr5NM-KmkkRLtvD~b3!%J16WhCmn%<{17gluqUjrAIA6?j*BxmtxsAyE^ zzHJzZ#a7&*&uZ)pL@l;W)c3RNlQTr@S2h-Pf6l~}i1V_1k%Im7eKMYWfp*Ld@Q~dPz6h#pQf2sZxI5zvFS|Cv`)}x2&s<;WJIqS%B^^v>J z$S18Dy*{^C2QvD`3m4buS2=0FC>CJCqW3}siA=|uUtaXjbP_ZY%Iau-rBSWEy-GTb zFpi2bClyC33SG4EXW#M)zWJx6YQRnseu3~q2m}bC@h1sxq6F3(l=Y0+@nHjGZ&Dzu zNH?(Um9;oL4O`2}sb$usoxo%^e{09bDH;f7f-Sbh1ar3Av~r=6{=9Uhhdb8|UO_^E zK_q@Cv0;vY4At*arE2^wdhU82(wDDGnl|F$YoPwK@^oGnqlBG@4UP1Iyj}L6rB1O( zwgx<*v&`{Hj?5EvxqAhEB3BO|E)M-h9Ag@4C zK&Zw3ViWCiizOrzPPH(Sa-6YF<)|7c>`{g3SxC1X{^E(h+V=aVzea zSI7Wz4%L4HfM5Abn@%g!06O!1;@3M-UN5ymPCxeLt2a97DtOp<3^YAitXMC!WZ*Yl zBsw|5Gxc;!-g|eR9mM^O*i9VY&YzrHagR5&qV--5Mce-Cd3WpxJI8MbgzF_{G{TBl z?ORP>A#Vq`_FR9GR@2pXutxhiJbv^hkmTbrFc zL&irif={Ma{`Y4Xm|O6gy}~#a6c%c8pGn!zoB5-y`cT@^cL&vW+H{I|y3imSaLp;* zG8(bSzG;CwbGgjpR+c(ghtG`e5*AzJf8klHMvrG517vstfU=FU!u=^Sx%{A_HuBxf zEM{XJTxk2v(_OUff;%F{?9T-S)il@1!ktK(?&6pY-X^fh?!;H6c`$(yXJS_LM+8b+ ztH|QVQ(3mg$!C_`?3FM9-4n{X$E#8oOCi;rOP0edBVj^c+)!fI1*4=Gbb-7RWGs7? 
zRL~&Fe;R0>F#gyLmjXj1-(uX8Vy^*Ie-W#Kuqp+AI>am>KfvVRXPs=}Py7WJGQzh= zsF#!)H2GHw+y#smTvWhp*ERn{&)d&Nt$$al2tQDKQ1$12!3``Fcv0rw5>|BoQ<4;9 zIEPgiqItS;%(r%_UoVA>cqGX5^X_UKc$L&*VF1)G_UATV@9)@?7W4s@<6NWnHF zm^G4LN9V$Z4Jpe1+5Q?1@;MiS=E~e=MFe@K$!F$6E4na0em)DIK93YtQ@j9bD)A(h z#yuah$|yh?QJmW$E_GO#D)dx$NU>kHz-us$nu-ttoedO9;mEI#jiyOI7bB%3(BD88 z{9zcO5CkCXzyF1^txTq=O#RX?j9p^iG3&4=tr58 zU@wxIu?)1p-uO%8bOb!QfXTsuR}|Uoc-*>)Qrb<&mZ+p-u1^_hU>1@-=k zraTVwL6&z$0>%nl*=j%k3!6W?X%{oA?h;`eS+`v{L@Z$dr-al5In;|pzM|3)5&naQ zsn}JLwkf3TBr%v4k7y~cu|KH0i6QMVi-P^Od$*wv%jm%TYn_wnnyrvO%R31BZPt<% zQ5F?dReiuHp|)n|m#qpF^|giwgiF5gW)%;=H?`)^W!))GJ~fUOHvH45 zjY}&bRlFjycEL466(;abz}VrzFf*6xR3iNrHO))(5XUYZ;!<5a|75mG(FW4&a2-tt`*CH~0zF#_3haZkSH+d5OjKgY}`pxy$OFr1O zOJ4HyTpQurawI>Eq$!N99{Q~TCJBTpn&@46@kwR_{!zHBaJBx81WB6oDHlS=4yV8e zpZoQ_Y+|-!z9&Z?c1FGOm?KLz3!J);h`&0hC^C}AKUwIRkvv!(Lem6g#vI?xVo_!z z{(ynLD>T90vtEjqntImPP)``Bhnc~UtC3Y}t&&|AI zldu&JcoE=3PHb&?pYnRMT9?fqp$fI?w9jwy-|uC7uw~!l~&og%m#7zDqhdHC}JG7)pyfR!1>7k^lQk#D|kbR z^QjYsa{Kvk`(!=?0#sTK%CRDf|5`T)eNa$V8)IZ}&;D#J=lnz4=(hz1*Py2M28SwQ zYmpxbh*AGNne*_=idbXR(|yX%BIa4;F2N6lZ6_5%L#*Cu_=`8)K5@{%+Y0^ziuHy) zJPq-Eg4P`C{IUY8@J=@X;%RgK*T=Wx&zCj92;s^3?-zrOh`P%WV@y}C_HsMC(eck_ z{C+KB>bs7sy^Bgxz^5g$tmCs%%2&E1ruO&ke{IfFQwb^NBUr1iKq5!&(oZRt{87@1 zYRP)DHU}(!Vs#dR(x?;ZP8<`}*_hN`mbH4sYv^jKb^-y(Z_DVG>mX8`77bnrzi0RA zJBXE^)Zik1Ke)1fI)+QwB>HDN1*^iW%&5inF>^j^zk>cwSS;8=gJYK0Myi=a~*Vl{!6%(W-n#6K($`TWl90qG}0lS%-# z>edj~rV#yvaMb6N)cV{(v1>ykFz%NALWnAaI^*u1;8c9zB=~*Da=5=m+<=jjkN$R= zEO?;6lBqZ(GR%pE&sF{f5);5TRy15-_(s@O!kxq>VA<-G57SX}q43H#4N+SO&#Q2**1?)d?@PcN_!yQt zZCLvNP@w6xMtC+%Wk}cLJ(Ak#(t8DKwl+An^wX;~Q6O$jLkFw!9T`lEusf4kT%z?!t^w_LB1=2~KJ48xuW?We z$`G~r&b?8=m2JP`YDv0;%ipksw=W}j|GeVq512d+*j(^5s z(78uCpg15Jt((1wW_Vsl44`R_F|~%jAwGXd4S7k*x3-&j48&rh`V-+GUHM}XOU>G2 zZ*vT#pG;Bgx!YtsYLT}yVI;%UkWP<%^5#1%WZgA}-Gf8z$sm{CYw6~HU9JpT@MUSz z@tdGn6JNSjpHeNeBk*@xdvm{kd_MfJx2|@vO?L!4olN=#%#F za!O7Q0#4HaoZTbGlwy!rM&96HS8X8h9u6LGncTh?FNk^xxg@HDY22PANo#f4F|)41E}ounYik@ zp)G?x6Fs4|4Bx~t-bcIHnnisH<>(0&vO@wBUz&OS!A_J87h`c zi>27ObD>ug|Ix|^wlREQx#SvA?2Ivrhp!_)Pup$ZgFDo}a4;&=d6|d(Ir~ee;W3-1*K)c9liU_zRz3rS%C_ zrbUidEemS{1>9;VPOZq<9D5$6z;}A6dH1xRQZ7^=I=iqT7^v-qELJgSYJLgC#^E;w z9y#zN6CIn>jNoaM6b+#2)(sIF2`op#b91a+6kj(0#W*I~)tRNw5^?1@CI(N_2nOV+ z#%^^9qV~rotl3(RR2K?39bk$T^Z39S=6pyV&B>`23m9|ao@kcSC*rSvzF1@LBvI*S z{8uRr$BKk?!cqOp0Y+UZuD5T{0M0-HoBrfz>wtb9X)(#0zsiFm-6C! z0z^R40Aq6%!x8ozh5OEr$~;*`{eLSDCvOA)3pJm&Rfjy4mBI=U9dKYGZWvgX*0}bH z&3EAJ4Aul>M?=n{GO_*iGNTfCqvEWv$B?C!N~GjzRZ#L(vqGV5Rqx@X~L@{ ztj}+l0|j%zpqSDo8IHNFeILSqXt>^63C0VOR>*~&S3wmO!^w(;vTAQ{j_8bAvGOzYz7O^*Q*U8HGxI<}MrpcaY zG;|w`O&N||gOM^CP{d@JXDfOYBU5o^3bHKG?WT3g)%2}Tpjfj~WJhy#}=UF$rEjROh3wQd&c)t_3x?gc^ znTfyQFblc~5{K8;b-%z0y~k4y6lEA*gwZRCSlu)+Hpm`fQ^G-|M7@9(kwGZBQvceL!(0GRnezco~k(5t8aK8 z3U7hZyyTk8f?ptGz*%_gkWHSiO`dp|km0v}>|ZIk-uUf1SC{DsI2u%yw}v?{-v%mM zG}$>e#{TE`UuE?TN>Q`${k~}+QuH+Oc~P@|Zn(okwV|`icf#L*K-;r4g^U=Dg;%5n z-{y#5w=VnFrI1!%SO-jkvE-ewwU&&wp5>lkl_xRNXVcI7Hx=ZhbLK_3x( zFh!Q-`p6;CRcjBoil$4qS_y3C79g2&A!fMU969>gVn~i?_$Gv{G=>7k2KmUR>VEdi zJPQRGd7@zoq#U~xU2kZe#$qYMYmbyU#tbBO?4ft3od8si`T5bqS0Z%A`2=nFh6_9f7tm}e$HRYGkUhT z)ry~PGX4VT{B&0e_KP5&AJQF38TY$BUA7E^<+3$bi7{S_;3~(C?aUBY#{$u>GKMy>skz1ER@?|-S{giJ1y;F zQig{~n~E23aAsurMI}NK-#<89y^`e;9Zl4<*<9wm%=4vUK6#ADoNPh8!+UKq)3fp$NXB6dY6wo zS!$u%W-6XV9gZ$~9{S<=rR26O%AZDV$B9x>DX-kAk`+-v;2AXC1zkA|;)KfABr#Fl(pJz295_SLUU#yUs$SuH}e{ zVX9o5?5&`W3}o5MyaP(2r`#{<+updYAkD`;NYG5ak(3TKA5vRtaJpm$V#h{*K7$qhY3oDz-BI2hRGvK~*0?nYUwqs}U=s6{NqUo9! 
zC6opAOKQWT2C{b>lI~PhM+$9vR)DbNh?kWu8jASzv)?iH81qOJ`Y!hOr1qGsl zBzRSYbz7Y!a{6s{PftI~MW$M*ez?Vx8VJ%@juzK)UP!XDi49hM@jgYBy#IkeMwpWs zegaX<6k7`yn>q2$*nFve4MtOVu_hCI8!WzFV?Fsc-KM&w3 zLH#rX$f)e@SNh;J241BKwI* zzG}wePP~H4l_X~e{HVI?y~A0e(hDsZB7urs=44c=KdWTvnN)FI_QCL^vzB08>D1CEoEz7$cW?IVWkMcgpGxnPK`cv>W{v zFi}4YPr8wvvG4)80AH%sR}mMZhb%+`6>g?#TWjitF6zKS)axpN+XpWgYH6#kx&ZtZ zb;jtVRm*iL8BL+>aiV(d60?9IR z;z=)a?boULD&YpdJkS(2sM@b~HoZ6KS2iVhTe`l(KZf&?T+dA=u&lUnX=|-as6K3T z?VW+Y*32w_apZ+zE(_eLvoT7%PDM6iYCAv97mQv0X`RV1JgDOa!=n4QA;NMpfx+WH|e1*fU$nwEfe6B*!Loqc{##Pk}jW=ADqMQV!1c; zgr$4WvQ3L)y|A%C`0lT38ZeNU*B2{w@R6mhudT$H;7$x*K1{=Lx%_kW9g!qaIG)|Y z=ExH{^DlbDlcARs=uBB&y)${5HqD|*7(H0#@z{WaO_ z@b*K@aBUWFb%^Abrt-P zbjE!guTyX0ZR>An6n4|ZCUO-D2Vo2zXLob@`T-mfHTnSR($M%3tX*_!*JBcTZEG~+nuwX1lpCdj&VI3>mZa!B9V>dlQ)BO_|- zr>uYtE@7ImQujFU6wC}(BlRMHQ*9&gKxHBUDq%Uv3!5K14ltKRJHAq2FUdh|pMW^u zb_^DJyKTFzxDBk;1^`#p5DU*G9+-0=ZpYTAzR36wNftZ+ShoG*KvFUWHkmg4a277M zIO!|`7V1xel*Rt#Nb3`*A~=~<1q|^N78Sv57gsTl2OJovNwmUwJ2_?6@p~5%OSmKUL9vFq7r3hu4~ZA_yw(^$@6+$!`vU4itHLzZ5B@av zLnQGAzu9L**SPE~WakLbEKIB`o3M(?DkyDTX=8v>$;_T`)NaxJxHSK_i$bR73aGnj z66ti$QAwa7onK((Q=$J|AbRKoMw0`Tf$*q3lE9G9t1WGk65l(hi(EYe`8?dd;CT5k zgx_(;oWC>4SuCdy(LiK)U$Gk}i=Q8$!M=J2D?KK-)0JbYxd2+)^%F|Ioqg8ZAug@^ z{~VonIMx6E#@}aftZX{R9!F;4kdbxly?0dh3LSe^2PJeI93#X@Ws8uEsL0-u$~Yvm zh(aZ)KEK!Z_rJ?^uIs$d`}KU?&-;GdHyxlj@bGb;?=GbGVV)e5neJtO+Kt3{qaJfZ z*((^;r0m}Nn*Q&I`=+W$c4F=T9i;v-kR{=6fFA<-SsgMR8)ba^nrFmKniPC9pUa(V zu#^T^<{aYIQvB(I7iWP$5+Z@SWb{%>HsA9Dx_WoD-8_)JWLTm{995e%O%%&;^$b|; zoSaCAL~Ngr;n;ZnTEPT^0Xz5+bN;l|kNF71XXpF#{xOK4iH^wcq2FQWe>jPAgo>wf z<^0;So9h>7{bU<^JPUq*Y3uUuPP9Ag_0DG0*VY`I_Om|nT2pOThJ|Y$(U}hGIscKN zRLqyV_Zw6&&w@--U9 zZ)lG@*01vZjCq$);R=OyR)K=qir{B0Dpv-^m+xIa|8!Hs%`|Pu#|sXahumLuVroch z52Zb>8V4W04cQ-m^u${++#2Mb{JdV6vQ2>|nMa@HFJRL-c$$IYane=%Z7@~ZYBbH5 zwd$!gb5YCesP=iqRPh&5aH-vtW)!wAtYByUvEX0Be}Dc%j0A_#@(X z-!%l#jRHKF(R5+th%IT4h|>cAguU;xh9Cd!hzyFkU%c1MxxF^naYa1-D!xP)RTop`5pKVqP<+*G8&925a-VSd)M*-H9_kWQ` z4RudDythpvH)ANe&5qvr;>A$r;j2zxC*~41i{G}0F@5DUahDDl(47Y*zP%<5IY3&L zrgRSmsKHyHg~Q66530}_IL^z->*OMBCfO~%%+;2-`ub}(>OE+xfz9`YEytxDYqsTkE;jdcH3h2e%hMY}qwdLBqko;C8P^L7z`m8xOv4WMR}Ej=-u z&ItF_trSOR)q-87FD?VRn_$n?Wic-otWsWx3hZ&Duj_B3Cd3w{8?c~%3;@!3T6&$xe0r~dwO}|p?0W13&3)tO^7HFC%@G0## zY8l_>ZtAR9W7&R*axz8=oRF7PzioH0l{1w6z_8ha9{25Qh<-z=$B7pzN)o{CHxD zVO~duFMw8L*yZ_KS`rJ4!9|3VC!X>rE3}K)vvii0uHg^^Z)R+ep}W(Sj;M>wS9fJ3 zQ;nGH43{*(lAqJf)lTs6B#XXKZ-pgak3cN8arw^{S` z+e$lQ{#T{$hDPZXd1yP=*)HZXx~A~j(d#V(B;Prx-4~y%Vq@Fvbbo%=?1_K4H<^|J z`H){5y(S`>Udh^fJCf-**~i?n*{)YWmh03UiUzBOQ>03N1BQ?H#J%TKDfqn6>j80b zQL>@p2s)$g3=%=jgYGiHIyKTh0kSD*WdqI)v_A$gTNMw(4aF70`7fsqu;R;LRYXZ|mIKzABpGz5H-Ju&Ig)@%pVvE*1$?E;kS!V(uqViAu z=Cgq}!G^?10B464FYwavSMKikkdxA;h5|KoUli#vNB0ndf%FcXV|Y+#{b8EOuFa}h zMo=MIi{-TvqEpepiQvrA4%lTUs9J#xOvWRuKmmo5)iHyM>i19Xel!(&0m3s5$VwAQ zO%~SFicF&27b}$a|G;~rxKt0o!fcx^pLHgl`!NpMLm_JRCQsHhtBl~OhI%X83o8scQ{IV=%}FR!7X=?|4$9^q0sYMIvJZgIn7}= zC%K^~DxdOJOIA_E;RYst!clQ$%)DPZv)+_2kq!PHN~VB`Uto4Wg{ojhbC8FEA>&F0 zW$1;6BUCW7g|8g#XnlA_2bj8Te3D51M3gQbGPAREfyA(96vA zpv#yUuD>4o$MLQxj?z@Ns-p-+m(j~MWy9ltASx^+*}wF1S{VV=Suf`);oQMjJ5Js+ za&Xtwod-ldFTXTj!lIPP4ckU_Icv4gcvia#lb+L5+M5RY;S~VM(+tCNd|0E!`R|fz z*)Vm{<1gkk*rWdf#4@yKvZ;pBIZxhq#&Ks=;sRiemA~xSC{ z&@qhshYvF0HVxh3BN_*#x(bRoFCU~N;d$ouCC_vZSPRzvBqzQS8xA3#BLS$*=nx)< zySf6>SQ>;H+1){ahvofpQo|;a|A}4M5R`LBK%Ix8Mj1jKHsWY1lee@X5$q^~ILAr^A`wQbY z+>>&$J6_AG?l#aU=dMqv?lMOSU&O+I=q+60F)JemSPSv3q@AZ2vi6=>i6}k{BZ(;Z zOp$_XQH-`luM`STw^;njB22s&i0Oyy;{@1qnl_sRKYMbX?YkEG=75C=;(XZms2$|d z4TuJ5*up$^Bnt>^asaRBNan#7&7n!UJHYz>df{;aB)f6u#3b_fv|?Wf3S!k2CkDXr 
ztD#IjM0Lq}6<3}Tbu0|eZv1R*+1Ej++^a&DPhDzahaA=oWM_`OJgvXCf=Z*Yh72Dk zvGIWO;}q@c*qlRs@qM%|CzPJybo*7%uG$#*HKUE6K{JN?IIwLFa{=ES(kmFJ3^TV% zvrPC|$x`mKdgO3ukqT($fqLIihU1WPmo)dnMayND_~mJrAf3Scve=Pxr^1bwfM>?1 z*m3a`>>@FjEc3He#*+2d0+uM4kb-A>?jS;cu_|Ct+2zJf2?LL41T_J;t_)&!Gd#?d zEts#eoiF~Uwj|cjf07_V?%2W8)o6Po4&AT$*xg)cX*x=h1e;Z4?ibhq{|57*+PJj{ z=Yrz!(Q_%dpp3`QSM1gzJCoq_T*;xJ)Sbg{^ag2^nMGCefA%4mH&c@c#~Ep|@pHTBvClQM9*wVL zgE8ZIBF1NuB}Q6x@wT>GjOO=BV){Nsk+lhrFm#df}hZRaX6gtB7?nVs0R?kVNMJ~2cg`G8ua}%`m0+bw9`*C^)TtAoojgH6f#$l zmvj6I-%%O?busAKczBcC6FP}pi7+xSV7j>?y0Oen&v-O*c5H`2MVQx&v?E4u(gqji ziDE>3!q%>mh-Ry4%|@&*sb9s`be^Qaf#;;aVS|MudL;=FHhBLCzWZuwYdr_)?{l_QxBUAnX zbD4s)Jn=Wvby)4oGRoyDOwyaEX`<*0^V^Pw`%Rs7F9_;&)l9CYoj;r9CC8(FLN>g9m2INL#ZQ1$wv`0RA*Oh+3a33zsY~=(g=}{m22F* z^thyK6?L2&RiorSu*V@6DOe!p>j=n5C3_fs1o z0dR03r_tTiIjfjDVXKw0@ZtPu-=n>x1=18US^B~%Kl=+)=PgSW7p%!07`jbTGh&Iz zQ*zsHwBaNv<1sgR%bBVs*8=mEa6rkc8mX<+wJ#C#F!>A{(Nzl_w}(V zpkO-;tgtS{LeZ594&D6C>e$0FdPNCVlkP9Zx>k!DvMM-Tl-ltO^uyQJi(w$Hds5lB zZA=|X?gClG%WQYUY^=^j%oks{5tx)ZV2oZXhWu!7m>rvQ&rrDsySJaf-eeGT^6w>x z?R>z0kXvchUz!hFJQQnRl8^aJvBFoK(ZEj%&6UCQh3ck$2HQ2M4`}1He?Z!LWz{r> z9}g*26h*)RVPPr>J^ems+lTq(J#m=OISd%EQ(a!;g}fuE`HTz7)28h?B`iX(h+y0M-V6b*>oj8Np?!0Z}|?j4YDv6zD23U4?mLP~mH9K;*on!b>% zL?uok3L*{t9Y1PNH_BTV?^3vkT_BUZlmD;0Oy++v=YSDXn)F9rJ+BHAVZ z`xjTM=QRzbkbZz>kbpSy+Dm#73wWvm4Xfy2&5Xushs)x;n0g?wcil_ zaH33{U7QhK)$`URv)6C(S977=?*AaLsykCS+F*?jk8rRP^QD#IVn7+l6E)Von}X!9 z&$a{jyU2amIAnP3V&H>J{M_WlrSG%Zce5S-?U)`JKdyjWsA2oN7iw>CY9u)d87LUa z3^@#$SV&tS}i>qCT)Vg!Xj1wOxm0>bB zoDXadA70%lS!p>PAxIuLTGGBcw1a1Sn)o!hI%0V8x!PCIT|_OJf-%K~XWeO4vNLj# zYi2hdO#;2U_N_d;%x>r%D+ugpxd{NoxiRKw7?t$$scfGQK;3ib$ znvS~ik*c-GeEG5mSNAyN@Vq5^z9r#(oIs}v^qVPNUe2Rcm%!;Pz}W#ZA-p?6!Tndd zgmu9yECN$-x`Z^_LUx8p zREisiyYe&h7ciIim@)>wT{KYh==7xpYe*t-e8zVa8`0{Z7?30GHLjC)9oEaL8XxBR zIZVMO=3K!wH%%M5*{CA%S0gXNxmr>M95S^Y`OvQ38!}YO)ZU8*lcc-do6m-r63wse zxjp?hXVz%OS3+(kCifUa0H%l(B))D8W?emN zwMbQw>wXrzR0~YdVHzk5=)vx3wIEG1mc*$D*up}HPM4Sfs`VVo}BWGG41Y#q7 z1Oy5M=^*Uk9=7THf|QQJ1lzWwffAC_S6`F(d#@OJo!K)JDl8xcuUo{3H5Z=^ZVF~$ zO#u+c6os}d@d5&C?AePh7HySZ2{x0Y+!;_d?6tv=Nh2FC_0kD$Rcy2Ly{>V-*yRmq zWL*6)hWoR`mr_`Qt^l8#NmtziUHklAp8p^wr~~VVuOn)mFlhSnbbvOrFNFUh1?zb? 
[... remainder of the preceding file's base85-encoded GIT binary patch payload elided ...]

literal 0
HcmV?d00001

diff --git a/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0040.jpeg b/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0040.jpeg
new file mode 100644
index 0000000000000000000000000000000000000000..2ab245ae8da7a700a30913e07251d32c55d6cfa8
GIT binary patch
literal 629981

[... 629981 bytes of base85-encoded JPEG payload elided ...]
zo0cv)H75+$<*g&DgW7|2wlF!>R|L4N zN~u^OmozU5$v}ow*laxkcR%FPB!^^88OP?yWdp;^%P>^u3(05)ha{Di7zx>C^-gQ3 zHF=Kdfl=HRoHkER$XrV#;@Y~yNG zx{M4eWxU-=8MB*nsF_2)GLwnRMZI6AcghTOwd%)F6kO=BYjgN8Vu9!PQD$;iPh(f+g+1+4ZjA)Xq-ns*MN*Ng~gXmQ2NtCih z8o@{9H@I!L)X#=$gD#iz+#N8;6LnLDsfPx7*dVXV_BG*<{Rc$XwHIORa-~XBqRdlJ z7fh;C^6OclFl*ghOy|EA5~;)wZ_g4Q=yv7hej(`;;g33mx~HRt=QvPX%f!&Jy*O`WH~iCL)XN`*;ucBf4W1M7~0+8rO%!QZ=zge;4%Ck-aL zk(X|cBXL49xQxJB|EwlyLQ}{J8J8^{ZDCHp^4%(mPVBvJpc`tWa4SVo;UB5pl{%#i zqJu_RR4MLD*F7|lMD6sfg0ziAMK>6nq&bp^41^p!#)GUc_dS8)d zMozZBF76CuI;%-`L>$}i^i3H_w9Kfwb}?bNT`162?<0(@T8K)5FUyNs*AdPc&v|0E z(7fKat#1`lq4em5RV8FdlV#=*9m`=*2g#^`RK~LSsutq4v3PUted$rck3Pi*$!MRE zm|Y>u$maau4^uVivPHA|kMlg*SEyM}bcvr{O&$7|O>B>#hn#75)bPq+IVzOQ>=Jr! zn}TJSQ!UG`R=Lu;iN?!?|dKFX@m=oO@Sw8OtO-nE6iG_C3y*QHhx=b)Z0flm7hS(AUoC}|pj zvCrkFo0YMaIk-A)!0Sh+YE4~g%5Krlos5yf{diXTed3*HyNrx=PZ(E4MGh3nTtZHq zw#Vf_io*&J>83b)noS%+$xMRJ4NjZ)c+M<~4+t{tTu12iM!GiAXCW&S)XsFJF$-f1 zPE$Vj4XJ%<)(`ph?yR%(WEF`ggL(d06B;WlNt@?_T6hDH6leNOZSeNWA)SNGHY!-2PDctWsVMBs`gw3sZHw@kG(QGv9P3!&yK{{w9=zg z=sTLNYS!E- zOGYvB6k=a^`kJ*PH!9=Ra7(>Y)$7(Ib`ZX@p#OkE`mM}4$1rYs6Y^ryo441OvLhjh zk+kNtFt%-TBO z#3{8`;9lG+f?0gtxH@OFn8Y^@TZ@ShGM#3w`Eu!f$?Q=oww-I5wxsuJVRHC=P~La2 z;VuM(L2c3X(rp%6Nj%dUlVhM&>=~4kn3-a+cdBGe#^h}+Xw5c|a(m12Fgb0K>!@uN zN5DNeA{>PepRNz;(`*?pf{E|LC>V=ujqh@S_?BT?p^o9H0DnXV=`qEQOYCGqJ(k*A zeLMPH3#Ieg!Sr?42}(l)xjN2`Ky;n^a%c0PoJ&TY>f zaL?izi1RzmO_F=cnB5J$+$jU8zED&G$qC(ZsrMV(b+!9M?|98PhcrfI8`3Nu@Tq;b zJxFNu^IfP#_wFb8!Jm8DpgOS4SVAY7?J{T4Rk_RHu64wJ@PLf4oXAeFraV2#L?r~I z<)ckCk1RBspP`n(%;Z`NiX+jVj|c5q1sT&y47^k(Lr!uA^{)`CG-4Hz^hZn0!6d!k=oddHq;grhiRg1t zP_nt2yp9t!|zg-}^=r8p&I9H!~}DI#rs z8cBm9)O%!RNTW|LCMiGYRpG||RYpK>@ zfc1xc)Zn#)y!rR>TE-+~`_#}uJ1mw|=!~0OW;<+P)aZ3v;1ispR}yLd*+d!BuM1WS z!EBNPx)61Bw)K-R5F9lPrWI;-tLEWIF$wvNCqy{ikXj^jWA3aks)Ot**9I|^DQKuU zA4iyRp;RRanH)N(PM0}H?~U8AWf|Ex-32ugJC2=9*un5Mb-@a2SGh_KE6Ufm-i{*l_ZD4FWGJsC%`SVNB35elvXLxV83aIsN2Fw>>60n4Epc6#mKNb5wc3BgKJL?Tos{oR}@GOVDYnY|^+8`68Ljf6ipvJNJNkqLE^ zpWw(ras+~!Zm@htE?rn;OWw^B3J!HDC~kVefzWrLq!V^_cWGz!;T7bNwWm5R!s|&R zSbN~6+s@p6kG$MP}~7z_b{vF05qX9cgofKOjv-tD(g&~&NAPsk^zIb$IWX5J-xJ7n3C z*1TgP4Rj*1ZI(+#C2zJuMtLWJ;}t1$mag-^z5g&o;lc7AeGkO#OrAVfPQ^k5G==wg z#n<(z-F34+1-pCn!qQGc(mVWuNn_;fa9Eb@YXzB^nXGv(Mq)d`KT5=HR(nj|w=(AY z_VxBo1sO4^s9S3H^pgB~uO^7#>@Z7IK*CxXGqQI@IX?u7-v0t-qa?Q3VA*D+7 zsbnX~S{K@&8+7m41QmA&GFsVIxWRl?mlDc3_QX(0tNK~X-c=G)3bYZRT88Xg7)*8r zwFn>mtw81ypJUpwxst4n?luzt=7-i(ekqVOI>)E4BVHT2%1-lOb%|ql!yTUP8Z3uj zhNwa3kATut%W{dI29_GBBp4DssZ;@L``QjKGpxz=xloy%VdQO(fCY6~io^r}dei9? 
zf0Y2@;cnq*o586wzrL!KGSKzstN06v-H`<*T0yM6k_738v_Vh_+hbsm4Ga}#w-Ek= zoDOe$_kDwt`4nBPAcM5%1QpfH|JXqH`Xo3%gQ&7<3}et>rAs6;?-Wcma7aFz=F|RDp z#(Em4Y^L80L48#N3_nI%oll=u1-S?P^iEZ=bo{c~{js2euE~H&-YMaSZcCiIzK(E1 z(~EkKua1?NZGg<$lpd2aRtgT5OL|qVoZc8YX--&jAf|-_4 zi0e0uM8!*kYxquVY@R18p$VJD7kcg#!8jZ3*;7-<^qCb>yOs8_Sm-}GjAKm@=fejl zYfcEDE2yt;{9aDtodZH_!Sv2`gqJ=Nod7dbbX}uPFDQwF#cl6rf-6I6_ECsa$z<*N zHb^~OC(Yf#Diti6(^k&@erUs{o|#iLdom#Ux|$*%UAh9DkMSrmhcpZ>g@$M&5$KX& zCA(hKD>l&iiebofv^D&ZcyaTPddbWs(_6(PakL!CMEpHpm#4D8^*Y zs;=^3A?2@vl`>(cLzQbG;;>9k1GhC~@|Jq(`3;FCXjs*!>BhU%OFbQ&Gvb-Y78jB)s%4GeWi{>W#LOiP zq{9!J4dqYP5oSqxjIxK4QG9lCh+ar$huh46a%y!D6clXnh>25u5#GTtIc1<}D@dww zHLJVFjU=c-v^l|~=GaIFgPhOxJ0cYlG1tU71S!Zz3LhOL57yrC;CpB9OVtMFCmH8c zgcLjBxbO~yS&!~A*r#zaaqgDxYIJFa5%&_aB-zDel{xVv<~EV{?7GKF8RR7{LF+ZC zy^yEu5(mSrY#6a9V|(O8H6G`zfaYz>N;Y&}MIEalWt1Lc!}ya7JPEEKbvo+NrBZ4D z#tiGxNTst9I(c>j{S>S%CZo`L!Uy`Ucd|!XyzS7nH2ZXwYl`6Rba!7b*&wH`+P>bN zy%7p0vFX^Hf3&xS#=lhAh?7Ri_lo<6#9dGo?v5!LQf#}QQJbdWX9AGIWRvmc;3j3#D>sk#jub%haLmu^)0HaCLntA< zwEj8wvZb&DW?HM)#%kGhJ4j-)*KshVyKPB(so zLs*Sw&K7I+k7{}gZ)PFkajo&4OL$jKcDXi-;pX{z$lP*S38#(ATa}%VTj?HltPwbf zyTBF<+ji~`LKFK>(4qf8wLdgU?l?zxsh6nU(NvO1(-|XQK|1sG73%v6D&ECDlca=# z=-S>8W>R;*sc`P}4_6EK4~H`ABA4O+ocUC5aEx3~3)h z{IJKd*!;YOYQ^ZMh4zKEx~m+zaDFFFs6qxUR7^JHpEn#v4ym@me|9BooVG+k!ynrJ z#%4b*tl0q}USIpOVR$*&uFfgiS|tX?EzQgT0AXEH1BG{TZmFU6)&N#MU%^Y$!t8}4fNJ4N4D@1%%@VPZAM%cR zsb<^ILD*HzFa_^F5f3fg?9m(JwfqoR{J4V?kznKXDddt@Q;UUEdO?q)7TO#p<8piR zEwvjwTRCbzwcHC*o^{sqDgLf7l2>vSD;YUR_NrkQJ=4c5Zlt9(8~2}yUb}dbcz#TUl%$|ONT}4wLy@0Sdz*h z-S&HPN07`78g!LG+PzhTYcGo4K$kgZTaQD23Sw3Z zixYOj{)xF;Iy|PokxT-l;u>w z;>?ui7@MsSEOZaYqVgWW&b4Nn*721tcNvM-RUfDGL!d1xo;_-V>t|J4A=PEFLbN^X z?k7aFIR`}hdk%OP)WiLlAhi>d8^zrj3tG1b5+i*(k&CPX1R`*+oRuwnrl&c zt28tz%NH7Xj;79p;QhT3`^aip#Ol4vTqkFS)b8VvZ9EGtYh=B?@ku@T&@mR3I|vm- z;YY2oIRcHe$dRI67_)(}t(9e9uIYGSyMF}Y2BpiK{Zwe$xm!qP64eP3sHftusS*pC z#-Ky}n0>i)0Cti1Y)@;Y#E+(C1;HMR(NC&^DkH{MISDzO?e;((q|>bls)YDm3;PQB zoB`!5l%(`VPiq*oz=m_|QBYZo+M`0DsR%_iU84?x{7%%GTO5)JojQdy2db`+B%96( z8*)}ZO=1rOB-lbTg**12n3mH%^4`YVmQ~A-e)A&qcdX^H&>+rkw~coY-4;DmeQ~0) z@{)}*=Y^0TbOq+V_#TT5?`|YsKbD8FatuzB5{Y?p7aU^Xq`|tKO%LnJn1w{ntbBAy zpZz3*xWT+$UjVNa@9g3`2wU_H6P!<&Xn{{G(mmm1MZi+G=7UT~r!t|Zbq+Qoyy;~+ zt@IdK=qWWjX5{HIx4l%1vKJ;`e;vo`HY_I-&<5)jtr!7?f<P*QfzHt#b2jSdZy`M~BMKN6HAjkq?39PWHnu)pYIWoHMpU2^<12f5S2uVxsW+UB;4Tld0~z6pl9VREd$tDMfPBuUIjT~e08 z$;$uK?-q2xmbP%)AkaTO9D7T%5NAV%3YssJ$(&t=v_+8F&2(knb0YlEiS<31&g-lewc^0;RLRI;k)ce*mux+a=mc!ciAV0NyOcws3 z7OEfd zRy5qP4Um0LLLV{@;tsbH@u81LMgu7IQE3ty7ENyg8sf2Vt|2Ij9(f(;bw^ zbbQS*U%0T-M`x0(b>8k!ikni_F`@pxxW#=CW*~Hqw1=9fj6OXDGH2$yFezro;n7%y z`;$#|)b}63EI%~3rR0KE?DwDp8XEO=Cn6wEEVx`TPkdy|uz3;^5L6DSM1G|0n_6r5 zlM2!(zLO{9UWHaCO;AzGyZLPy6rWs^cj6Vz_?V2CQ+1OyCkG*(jOoT>Eim7Ec$vXx zxMR@Aa=8~XvsGKwDkN=^R>d9F0|@5>_eIg6Dwt31s-%*pVj`KLUoJB7C!q7j`U)Bc zymE6gp$M#;+XkRABWdMP0f8*`DdHaBA2bGiLS^rY&-VL4z1OEo*Vi9v6GI+E+0La$ zi6NGgfdN7InHs@>oN}zJnCN)`b>l=_dZ#p0PGesvxKT+We9T4xd@NDB#3UVu?3OXB zfCko4`}(og#l&Acu(UY-+P5;8Xz8&~fUenj&XHQ)U6M@Vs~(NUi6yWt&ik=LUOCmbebm$TewpKOH%6owYM=6IRdGG_Esrx z4`lJX*D(&{gh4&R?$SV)-eL6Q!%(+ON%#4|nV4JJz#!=O@f9l|WE@{r_|qa-`tx~~ zsL&<3aBoDN!nO|LT#`~jJ~z%nWVMD@(1Oa_pkn3-U|5GHI)X{sIR(UCzA#MB6hQ2o zsySwGYGSBV#cN@UmiAz=_b?L%Pcp@Co?T2hd!TX5HSZ9^r{7x^C4orS+gD+oEhY&v ze<(3sr;`>saC)^{%6{0lV;i~9K+=>X9A6pDUD!8~9u3n+2a`PElz}m~Js~9?GCH<& zL^S~mcKG=p4}Jr&ZF{S-I$69>rZ!21lry`lsz=#<)k4pXNKO%tLf!?3c>G(%Tmxg0 zV;In0caW;>(Qwj@9-ROiYS916h9hWHt7v+tKr>NH$mn#}fM#b38&)~5PnocLUaL*l z>ylXUF;)v~zr1;3BvuQzfbCqdB;+A%Ct2aN54`;v-}1t|7Vl>7r3DblLuW2kLn$z; 
zw40sPFmWthpEZFnJ2HCbPYb&rzq0b(37Qr1_<8ah6p`mlkdYs|WZihaK$RGh>juj+g9UCb=5OU^yHZnhb-0)Gm#+9Xq0-H$zX+ zc6epQcO21506*cV_t>Oa!Lj^d*3u*aFTFiqU2YTzhUmRZn~ z(L4Yz%8ycBhL#y~?RE7+mSTdViJDC74LFT-bDo4Hnm6{TY3b8)X*-8(N&QYSJ0T+r zKflor`zfn(i)4fP{3B}CwY;O9kv4;Ye%sHlIHV*AI#@ya9^DL76{*L)V6V9+eSI~I zUqN(!y|R8#2O9?Tdykc|u*sbg%D!CGJOHa*jy-nvjVWl;9oV%Fihu@_7-5e-$T8wY@I%EQRDyQRkY)k~S;-H@&?tY{ zx=R>Q!hxoVvriekX(1fZ@{lntnTE7)ARo9d4u-s(6(Cj6{h(FA-yRjz6y8IM#;2gQ zk^?6k=ShlIH39oN=aV;nYNBgk@>b0xwj1bTsl*5na$4$aB{isotxSk|K4c~pG6~%T z9Lic%l2SE1+X?|Sjd^?x0}>u*@xvM;dG4fc+qSLcrwA`ULp5Nb%JoMo26RvjE_qB9 zL)6eeGQdd*yRM717>=yGKzKa&Bso=`%zhJg+DcR+-2f!9@H#wOS5g10g36JMMBj$X zQktie7FvjB%bVNnUFw*HLnecnVToD@RV2J_1iG_Cn!dhYx{j=bRlWu$hmYh4q;~oB zkdd9>)SM!ImPzA6cO}c-vyh#1x_I9z+zYJ|@e8In5o}ga{Y8Q`}MB=siWa z(gu?s$D;st1f6s4D5K-AggZ!n!@~KC1LrPQ#-Du%;cRNMcC5dc?Bd$G&%;IoWGRWG zv!4=A)6FhTYUx6cY;ANZ96;nUdv+Li89;!0=|yRRJV@q}pUm71Rg0(<67CS_2#!PURFV?m0mpk%B@c~0L-&~R3gSD! zn}`-H)Ep$D?sjx~4@?1ie^GT;$%!}^^YQDr49JyvTMAiu=}omJDReEu=9tI%c_DD; z!!ix2e5l%(NxRs=JZKBM>p$mu)<`4OXYe>|#TtTv9uz*VmJvLTHi?Y{{r?GC`DjDzf=W1@u~2q60#1`yJ;{H?kqs zKC-_51QQyIKBwQ@@nYEls%cC{(@!@U1W?Qqv)8BmO_a=;++DYDE!D^U;cx*-5w~0W zO5s2hVR|<4axJ@E+DuXf=`U+CcNOW+!v2e2LTY3Lj37;^L^4$yu7gfOomUml!@Pqq z4>PJ$qxs*%ww?CsrD`~UVq!po6Osze!_X|FS#nQ71@$E9pIZdy5YG4dw5D<@XlDc<`$f-y!tt5KZj&0(@zOlXA z=n4O>?5o%fKW!~A!$zI5e!yK&-zf1p@qwI1@>*8zT?%(vP~n~q_ssNG2SWqfqMC^q z=q<3}D3cpWIa?O(1O4imiONtpHJxenWEK_4T%+NXZg|BRn3B8)5uY(n@Z_|?-Ro9D zY=sUxGY{ZP!+=pdpHWpu)3IZeZE)n0|gL0O_o`A|-cM1KI&H%;j~<*0$Z z+%MG})l}_DIA5C(S<(vq9h=r!0l}>5+vv5B`NRtc7PhH9VPiAILiLsGgwQcEJ0j9T zh5>^?87DKvUxVXxo_BVX=?4|-U;pDVLp))v)S;c!=RV5DPVucKQ}I^hJyDC~C>bo$5s{T(%lA&}L0?Biux z;UYtn(_2fIsODaQHGNth?UkS+DL>VYjg88Py;uyJQeOK!9V4)0hdqc$-n-r;ID;Az5&uT>1$1RR;9T+c6Q#2)UEK zUdG(mRo)Nh3R2)uxhe~ShlZp_iOL|O(~L*Mu)Zy`Lic-h&cfbn9GsMjSQ-l{>Ds;$ zIi&4fP+3N)((JJZ*c_rtgm(@9sn(PTA4F+^gQ7dgVkQqt_cnuIFKk=3J$zW}fR9jM z?2H+3&#xz2coW*tOZ~xQ|45V)!suSNM2%7hPvP1cYT^IFpmH?}b-G?>>nnt7aW(B! zK%H#f+0)Qw8Wd7eVJ8#Lx_WgM6X)o<%O!B|G;OlRBB&sH>~Ty6Ec;c{MsHTA?KoL( zMRDYHY{$9tj$y8gn}XR{RK4l&=13W7>tOPRh$f4)o2i()fR??^ZS z9qobi7G5f>KAKCNaG*FOH*pL)e4&joHD%D%PC4tYOONe@;mw7+8l;8pRY1%zqz!}6 z0!~PVa||cK?Q*f? 
z1?EeQw&(LHh;n(u7u{i}9r9>hQhu0)q^~c5*5{SSQ^9957j`V5m0cM7Zn;j`W?=Xc z?XK;yu+G0Ms*c+{uKfuP4+REw!{IuW;F=ifi1Kbq(GCaw7ZoxYnXUfejZTC+EL9~9 z-W!4WOH*Y{83cLkuNQXJa>9EWMOQOnbd?v)o|O6cbi<&RXE)g$#&$-g52Qk}Sh1gx z2WNlW(*%yHX(-R*L$?-O9fVUFv}}jr9%Pb!<+?6qdncTMY_jS~l-l~L=X_2=cZxiS zeX@y`rIVA0H*Z6a7}`cur^3?&_gDA-k@WUqO`duF=p6z)KrzvhvrTLgurMZc&#IVQ zI|(EZ#!DioyxHBem&>9pZb47S5&{CDKp<_MWzzscN6Ur~Cq!`fAoGGnrlQap3C#Q; z5cr8BD%4OzL_{QjA~5Ik%xEdMJm&N`z{69bj=cqJkIK)}9(*z$4_WU_Syx-v zjaXoHxm*l9wJ2Z<%4m(@0t++N5#QDa=NxD70lqgqyoKfnay?KJN^=0^Qx_SpRs%=k|S5x=n z_+`Jg;b)Azogx{b_1=`?W1K(Tu?dfHl$3C8EP|Knv4!u=KyPo8&!kI$vJUUY#du@t z(g2KTsIQ5ceBDHX*Jg#$66^&G3+7KdEpv(ks`pf>JdFm~c_tWIAb01PR2Ax*xd8-7 zNX$r@>w=!MG}$473PTYw=s-Dq{@pWn% zgjLv*eF0(~OPaJ8fG#V@Ye)k6^!|YzUbSE!x|mU?fxwCXQ3DFvvR%>rYjkl+k>0DO zRmM4{Xr+#pkQL6X+B&ZV5PnE}a{!6YZGJ^C06=K63}7=6-nE5I1{@|PyQ3!%U65h*WD790riw~eE zj9a>IF!j!nguQO{vGGXU#7~c@yaN&yb=$4#dn#FhoYdzAWTLY)lAFvesm))XBmsNa zycsi^e&Z?hqYTNyy31`Cj{r|o_@>XO>6zfW>8SzVw_X+<(n<(s0AB5t(^_DKGu?Dr!TEtXk-om1a^>5!@OentH-LVV& zal(jrCWq&wO_%e|&s;@5x;aK#sQ0SIarw8l@6U5vCJ0~0&oBXg;Qt)s4TY`Jd0iwW z#dbHy9}H&GG8+?fMn!-~38++bi>hc8_ClBdF4g%NMC#FpNYOB%swUW&F*S8_Xkpw^%hq+#l^`@&C)ZX(iX%JU`>z3I z9wY5+bP&G?U1z=JPo_WMqiAXSOcfH_GfY@Jv>w0 z$EnF>pcO3Ycq&djgL=WSg|}qmzd4YkM1ikpOZZCk2{c`9U+b(J^+^z2*-ooL98qE5 zv>$}s5uMPHEcO|$*60BPeIP~s=h4vc0X^essi!^}>WHl$n-faC;1eDPNKl3p68(G3vR|x@ZYR?{8=rka0pa4rul2x9ZuuGAQKw!|Izkqc(nQ_h^iLJe&wCRS zPIO{(gj9PBL_}(^;ze{U8_>-#svLvz72a7#U0vCHO4Fafg+l=o09 zFzl@lij?aiM49l*Xttl8^Hr;{{Iq6pXqZHRB3@QGDVlJq%`nWX6S4Zfq%1naZz-NWhBK9M+fc zK^{9V+f1|Rexb#~;;3(nPok9y)$T-EsFTJI?}(mxfiqC`Ra?vP)UDUnfRn1B2+x&m zOdf9z-DHiQ2_~FLb$bt+;uyf5Sh#IkFG_oeTv-YgLcFhR4UQ}2NU6fB`{!>v_p}pS${8AqlzmfSy zJC3!QX*Mk0iSQpK38}tbu!F0ME~ECoK4c$+)wQl_dHnPj1e)3Tn*G2*n7Hb+%eL_x zdH`=kZo~Aqco@iV7%w~}s|a-19pNifR;zbW&6{^zwfY&0F~I0x_(G4q1jzLI-tqA! 
z0{p5e6?lU7wc)O@VF2UEryV~bg8$fvPE*QoI%fV#*4vp?st{cv=j~uF7nm_LOXHkV zZl0nL_xEn?4a|LsZ)N?P@%61umg7a4xHarT` z=k+b0xkAC5yai%krUXu9&7q#Wr{8{Z&Bi1b#0-GVs<%e#4vJ;~`!1RCup4k^#^6d9 z{2mvhogR+9@%ZdBmP?K-IN3NSL$O6VE89Ur{iq6n3LJhuT$ zA4S{P+;Um#0Q=RbU(4lqUrxjdCM(hPJ%`+oPg2vUnKF7gl|0ZmQ)E`Mq|b9hk8?ri zlYZktJK8Jj7je#yVrM!(vEz{LWEl1#cbjM~FOvTf^Gx?65wz3lEP5X;ivN{~O2Yix z?|>HSCxo{)lj-x~gDyNR2S=o%1ruA zn9!GQ+yPkChD%e&2Fl23dor8O7@QyWe~C^%Fn;ezHJLHxk&Rzim!S(1z??d8?!vYR z-h9=9ilJmg)h9A}K;$^4WJL{izywX5%zHprWMwLs)%@-7rp(m-nCx}Fr+-H#r(}!C z`ga?u@U;d7PC;2Fb6`I#e2iqbydQG-YYcQZ{u|x0O0L{;8PR zMgw>a(+ZzGp$xq#%re3EKyFK%l;xI>#rDUHAECAMibvE+z`cxxVV+DNCX26f4eAOC zY6njz#{CgF<`&leGQ&dBz7qPsb~RugI=9$3Q^)Szoy1aSLxGi=?7VAwfCtq#B=cD< zso0%7mC5161k|`72tz();8IDe`N)}}- zL^#DWbX@MD`lC2(49(Fx%MC0{j^k+q=wXn~1G|PutSO@|7UweF_R{tWOf{?8v4ELzbfd#E^1!0^NXM8nV7ct z*ojK4n$b}|pCchfHjuoCaa@?DNM|u-L2~=U6&CG=9DzDgpv6W+G~*r7_?%b!xUK%^ zV$&Vs*-e0mBbs%W2}W8K`BCFMo;pFw;aA1!)ls)m4=mGm(T|W_t&f%mVS;=XObH~a z0UadfL26Cy_ZKY%P=J^M1EL4r-@=h1cx_L>q)2QpG34H-%bM_U<4jI3*{xAyN1EQ< zm?KZXauAo%M2?M5yXt^Dyn5330=5ytFDR(!LSL$UKh`gXF6(kT=FO143lc{g_X<`` z+k9dqU>%Bc-Va)fmGzx+0T3ZL(%gnY&rF=}N*gZ1nZ51i=MbwXWB zNQqtf`*ARCg6@wEQ8vhp#pYb^nS&33Xd-gKffaS$YZ>gFAhpuBvJEUE>s0LJ*7QosFj zI)GwymfxaL3)rc>n57R2q2Nyga)+N?J4wB4b=TsP?C4uO>^M>dOHs_^-P-J}TNC8- zVh5);?b8&J=!2IxW*pzZko1p6&s7J+epN45q|Nvf66F!aYc^=fp@WFD+}OAp2Yc%b z+pB^P#;anDTC{Zk(_cDwv3u>~<^39o5bs9{sYf-{S`fVUa_p zgjAa3{J+9)R_zXE8-HGE_@!M7)MYGA0-Vg3D?l&Yb7CK$Zs{^}p;=p5wfV0FqIg@) z4I3b!<4y|2LlLYAf}(f|o-z)fnLLSQ2eeDh?H{15k;^v3>vT2?|6D5svmTC(E9c{Q z_6#|1NHvxiEC7bV@Z*d{JWRfJM3dP643bZ#0ITF!_5L9AgDFAY2%Ex^&(8rkUaGtu zOA={F!mH-O3l%)y^fw=%i(LU11tZaMwimOuqk|eZgW3CX_PHBWX-*5IZ@Cc$Vt zy?m`IkhNdOiU#%G6af2^vVC?59eo#2(XhT1d%u7U_H2ctHO>IyreFxq8fDxwR&Z>K z44N=M+bFytC7C%&9VT;g13Hs;ezF65)AD@EKV9TyD)+~Z`iW=Uq8F{R5csmQ(&d;Z zPdIA)8v1xv|9(Z)9Raql;{Q8Y9;v9GKML_Zr)Z^B4*D#54xXD-T*r!&H{_12jG=5m zLqm4zG7?F$3RkQ%G--#qPqm;v@GOt;Hk9oYe+5Wzfy30L4?{63Tu6yE$)rSRxOP+o zfM8qnAUZcAIyIk-MQexA#O6sKLDRYZ#sxEFh@>tWF3~;l$L0NQJlJ(j62c2c4kbXy zLGk94P)m+YY(UUu99zL}FBkkiik(5*6!!jEZftegII~`G!MkjG&y8Z`}6)m^q%5;~IVjCD8WeiOl+e z64(;>g`69a{^_J|t1H}!@fkTG^9v)}z`Xs_Gl|P+Ho#z^*A+gECL${uX(BK?nTV1S z-R#(eZxh4rpY^e8cf>qJXp5cfPLplqd0}kn^C;sRo2ggDRw~S7)fKkRNv3hN3fLWc zA?hI{Kd#g-KEL*A}=^cY1$*Y`8j*pO>P(s5V@iRN% zKHu*1H>k7ROlVypL%0#K9m5v+JAcjKY(|&mTnlcXL%Cka`P^LChfj!`-Ow-z_K=q| zz>pg1&4m!46qF`3QxDEzg!~87E9=|k=XE44?QY{zKlCbfv7sLuA#la44!k~iLf4cn z!Ac*@ePT-829oN5+h?5>CgPKpm)wb7iNtErFtA_8)+N^Uvgzv;b9m$*R|Gl?-qM`5 z+u)3(6=>bZCR;{KB~**LybE^{F{uPBp zGrBWf`5L=MxJA6EV!-ZVx*0;^+#TGAd|+yG2|It5>V^n|(f9g1W&a7;`)kh&+YNvQ zy-=eaQ{RU3r{59oK8Pmt#I%9YZ+4FaNij-197fEFv~|Y>T6<)>)k44{M(a>ZW8rp# z{}Ws#y!n=33~_6iRk_M-(EysE+2UT#rCv7pXM9LH1RDr3nu0i-Cr zzv)6&z#e;;vghJ{#{oDO(y}Ww$HrH!c9Af ze_wt&4&JPJAY$!NmR88~HoR(48>q__Z%wMr=;?)-a!(xS7V|oWoO zp<8+$6-?01`x^=x1Hlq#DkFZjOHz`nzYV3kDu6&q^U2sS;XS$7!(9c98!?wRg4?RB zfb1Ba+{xD&lednN@B{Z76qV+5QZ|MGyJS6Mg0`~ma&)F-TfVXS&#e7Q|IU@TQ3;fS z$~iYpT(_KeUA*eStfWliiR?`Ld~aW@H{~CK$>uf)6Pa6Xqp!8)=9uZ>9K_7H>}yhD zo3=;H=3*&cemypi*rBlNd&)~ujWZjDBJqIZbQqcesPf(X408xM);DjM3_2Pv@UEJBz1lOO z_%A_mEQ}l!rQu){uuuH2QRb-+wH6;?8vbNhu@<>O&l+IWTC9atZ|e4sDJ52>$;UdO zM^!AucjPqx9#<~s<}a0L*|Ju*K(~`*zjjIyy};`W{DM31TnQq4vBfSPl#+8%!r07f z`>7)+>*WKbex{x{+}^|~JNI>YJCP4Lb#bHwV6^)sXUCWH83!5j>2b6?hN)R@6Ll|J z12!e|>Q6IN!BxO~h8DrtuDh5j7OH$x+C_yg5?8T!;B=W#%=qBnd;i1HyPGviTgxP- zdr~3wDV&(F7R2Qc{2SxR3^PD;qOnM7*j(5f8J0iRA2t~IXmrgZdGT0R3XQ$%i+aXPO*v^P8eqx+u zpCMVN+<^`4{87J#I(Vw^^#4j4lG6MUvyztWGcRCD;tfzyK8`)*)_9$6ds2|qACvI! 
zNBx+GEn2#XB%e)grK3o0IOdZ;ap}e@%%gtm+ElrGCKr56(-40iO2xCkJ5%fXayM#p+E z=19X&94A!5@wnXG*f{dlcG0iFC~6JKiza{%QY)06#T#O8{RptSI27=8{RD(pA79B|(-jm;Fvejdaf9t{77tnof4H)50&lnH&`Jl2ymz&DuCh*S2&6ocfehIRO7{VuSG`+tbQzy0a$@GTYt;};EPlB^D zt38mzUxMB6$eXc#2P8RA1d7hyum*&$IblkkOI}32POh}|M-6a@e=p0Gni;51#*<%1 zl-0-wohOc|7&Dqz8H2D5;@~Rv{nkPtAfX0l<+W%@p9gFDTW0P3s@hBxYL)h`QJ;VV zoRpL_)}ZoEqaK9s(M{p*q4dh4 ziXhV0^hsGIJbZ~0b`h7qU84#K8j-^twJ?wDE!}5RCDQXtG%xb91hTQ^!9i@PM475x zvGS`-`;@rE)852^ievpCu1*;nY9w_OA3s;joRaE5>_H4N+MA#8LET9x-?VNbf zaT%kbLMR@IV~KHd&glyP@lyV*=(yY_k}>6~O@x1V_LMHuJ=u0v%!tilax}Ey$mqjF zxH33|@%8gC+HNLyDxiU219)lj#u%7vRaBdsWe#=8ijjT^5 z!$%a#rJ+I}X=P!}40b;NUZK=|bOCaSiVw~?6##9&fSVz}`ALpNdvrndR{ed`L$syZ zGwcYE$V_8}w49+p@3;HAWmve1A~M00mUxrWCK1EpzHD9D!Z$XXCVKpH?1cL|anQ5e zx*@~De~q0ICZKG>52SQ{eJ2~>7odLqDR(3=x0!VpV@s=I{qrA~ z#u=ZD`k$~B6hCIml1+is8pb22%p6?12g3!U<@ewk5jKPsDAzx?=F=a5;8qlfBbTiH zbCQ~4*b9v)D;drnWxJpP!mYGFgIrd3+9`tx2y^Z&@gkx`{R*ffr;6;fLHov5p*6oG zY88d4TPa?}ViY+J#+&Ro0ZzgLDny+M zhbDY2J-DX~btKyKlIbJsh)A`+<7ss#Yrvu!31 zzbOeLS{hEZ=DwbNtp0$;Ys56j7?gWkQuY@B5dxbKVO{~)`waXFD>XL!dn_Mg*SLKn{di+Nk>m{3&la6QR0? zteej;?#wb6t0n!Bh~UZY{UW;mYye^USH~d4&+{#$(>F|C84IEl{u4Q+NU{NS(*{&o z%_)k6Dk8#jTbVba261gZ$-1}I^$jv&NqR7kUn5yAh=Hmj`(f$Xo34sBnk^CWP(4G1 zy<)QKyX99)Ds=hKrP`Dr9JKa%U#k;6BtQ^jfkqey0K9J!Ban7RXiVSO6_qRPjKD;9 z?EoU$7-~ekP{@m_JstzF%_Gu00TjYpu~AdCIwDzY+OO6@(pVXG*x9KgshmtsrcHh%B+0EoXg%_03izD=7xLyhnO`(l}!#T4idfNNPSgiWZ{t_EBg^atVHdKIE4vvk0Q81t5{R!{sl zfWBgyVo^x1b3w~JF`_E2VL|cRw?AkdLUtxUrwY*=&|wO^ha3~5er`iw!dE{<_Kj+0 zeN?W*(hxHa-3u!Sn}&bkf_ZETTyP36g~q#9ZsWvqv7alVrxeWGp0`0ETqJ`-?9j)cH7}h-0mV{EA)Cwc|OwicuK@ zvH06_OkWxhzcru_)xKH*o@XWcIM4VanBbv+<+?kix#OLHFp#bSLDaE77ch_<<7S5J zhu9!65jF^i)rtva6OhfruhSD^<<4%9X*>^f;BGx8YMo}7H#lpe*OElj#OHOfYq)}@ zo33wTO@AeS(!V<;eIA5pC=mi0^7rfBK+~UlOE*@#gv!e2rvmbT?M zA+PsIhV06Hl4*GMp5_f;_u6}9dc>r;_gKHZvs)S(o(LVr{rn(LeAlBG)7G!_p#>Ua znaf}*Kvl`%h1c6L7D4zcU*VX4t|$)zN~P2so{R)FN^*O=$wpw9#KE)o^+!PqDERW| z?o{Bhs=#37tNDebz*0V&7HpkcJly9l?!o8IK5v>hxiV9Z+|am0Pm3W!z=W7<6Ac~2hs+};x;^KZi=<)G1WlshrF+R* zQ@qwqM!$0Q+;IxRfKzedLq04gLM!FO`CwY<}a=A=5Hnh&tjWVA93*S}fbz(7Y zaoRCIfWuS<_}7)-_s0$#Gu?nA?XA6RHB>>sEeorl|nz=&8~l^_~G*uGE8W4)8tpv@b2*6w~}ib0aKzyGpvYznT3ko@HN z*O{ll+MQ^(RmV3HmUYL^%zS-s95iR+_T800s!Y>g{Dg23ZQwYAv>^78Q4J8gSEZhs zgkQpRcOnHyI&t5<=)=#&&kVz#WarmoOU-Q)ZHN~QfHE^+0`=YDA=!C$tbP$Slf7JX z%t)J4a=N9(9{79=uQO1*GgI80x6h${P;IjH%omz*q$q-v!Yp^T>!xNA(~sAXY}wpK z!kfkM81ZCt5_LDo?X;^9{{$w*%&RNW1XrAM;y*q2$N;EN54XB{HBN+Au*3Ig7Y}JC zqekJVZfnCjOP4v@ItU5%z>OQ-hoX|6Bv-!TUM_Wq*I5zvY+RdnLE#Bh^mq^q z#7^(wm-JYq3QSA@fj?J}2G)}nsA53@%oMtc2SKG{J9M7t;0-!baT6tj2_qNv$}LEi zgs<$l1j1(Vn?K;e_l(46ByJFA3jDqjhs%Ualn=cg$3**g;vHziZTU6cqXy^oUC0Q-uttdfh0Lb?r^=J zJu@)%5L)d>s{dXt&C!QoWSpuA42E7YRpPqX`6et6ZD$R4#%c3WwfnkYi7H=c?dzfoc_UbzxiW@q~IS^M$nt z_$7Dou3=dhLS*4H@F}C(Z04=bpkKu0To+Yf(CL6&gSMT?e0M0p7mzIYH30MTwVEvG zke7V+ZASEpn1+e5{`mpG7R9Tr$U|djlg_P1tbo|9RE!b_wl6O8>!^$eT5y7BV3^*t zFOlvnU|5+xY0Fo*3Y|Y59)rB#p7xb46OgV+3|f`qt=cJWlpk(6ck$Jjk>q1xi^#qG zLoyxGI8fr0=Ax~~AjBY8fu?fq4$lpxFZQSm6~&bEM|JCvmEB|=CG?&9o1;G@)=&3w z9eDAf@RaO6DBTOOGRdrb;J=;Brl67JJ|Do;GqOk#7(y<1T9%`GrPYn8a0Q9V%t?q; z_l~B^Fwz8B^JBp%@Rv;AH#|iJ=wRYkp^f$ojp>FBxf|%;D^kNt^zPf~r<2`WVI#)K zSeSi@aR~{GjuWnnpkpE8uJQ9+A;3+y2hC&GM~B;2(tn5IVlijp{Q}U;z~I27ypO75 z-L(HaJ>hFE7F70;n*C}Hxx{I+oJR-#Qw@HREnTK12(wXc4fQPs=r$zcz$3)a8rRmW zZaywT;@w&Z%|xiWdH^Cr&a7f4XG|3m->8LLoiXn?o~@eG5vhUHPP$Ylp zuBn&)6ZX9|qa6`D4`9zeA2nNbc-kKhE5E|1rB@lRutC4$uc;awChSC25bHjKnn>%M z$(Z^FqYz`jlwAmC6jEoeQ%^`v8;*#te?9Zt59&%He+^YJv)-WYOdMTKX% zW-w_t|8t{puK*J^nt()oSwVYq$6+Ka0)_209i>K3E}=X#r=l|SDi50b4TnA26TX*u zOHBHrDkaGY+b7eDf%wXULEu3|;Ps 
z$JJOgZ<-*Qt%E-ydt!a=6{{Q4?=qH{i@O(kDr4$5%2?!Yy{Ph&At}_w56NBikUb5`3pg+9y#3_#tE{7&1-V z?w0TC%296S)u7zD!{NsXmcCpg0H5qUH*XQw?ta$GU?$3Y7ksjmWg4CUAmgd>e%p&V zNH%6Mp{vS=MY#Eq)!X)Fomzp%ePOa*4YGG{hRZNPg+nsT`gbp0ov%^_FQg=j zxE?wvCsE}PMF}6k=I>3>0;4F?xh^PEv8EM!UT2Kc2`NO{*7=@3?1dLn=0NQP?9O!Z zNuHZaM?88g4>CUnMpoQPEm@gXNKjo3hQa&z=Dd01W0Nu!yjfFhI?0Qq=r9d;_()yt zGWw)!ggp}-j3}h|EYRHI!8`bsbXXllX=gIb7ECw6)cppCes8($;a|h9ncy-#gyL;t zivwI(@cU(TDm2cr<_FrL=cf}c;m32C9^n5jMvO*-g0{y7s+CZh?fvwO3_kQQVnJ(! z_=KsH4CCK^W=K-^!j86Tt6Id8%xA^4+%?I$Kwcdw?8vz|q+|$skf2Zrb>W<(R+m+l zgG$F$RQ#+VQozPz8qJ(Bn2@96$De_X*bU_53MwYBU?QnHY+gpnIm2-_zLE8%9!wmT zy+EWO;+-g_Ja4rG9%QR^g-`Mr`(nmd?6Yo6bPrY$Cj%^%C%xTnWxi5*1g9y=0xf>7 zf4!kvlXBC@+Q`M@GHmolr)r+z6owY#wG5n;^AC-uz`ibS_1L2}D>3vdox1D|qZ=U(NESSn@~%JM2hY;(lE^5; z?}#>{?+pRy=XzD!1Y3=#axU|=w5~*(^2gtu0@2IVI9-iKVX1Nu7{DF>7x6t9hwF5f zZ(1G4%zq`vNuWj_`y!8`gC)L;U?jQ8oJ4H!oJP&X=)g21 zuUOP>g;fbD+*o=!+U{Ek9fWzmp&PSysundDHfBFxE0-n*y!Q>`{MnsE{!|=ro)_Os z|1zJL&;d|^%>`DWY8i#D>oO(ILx>!jD>AdXa{k_Ip@&dsLP`t*nV1vn=dA1aVvEL) z^@m3A3K4Qoz$#f#4JksgtR|IfaG2+`=CX7wK(d*1%@m$8-oT?qn|ym;Wo)dext**H z+9v3Uep>l{Iy=E5CDWc% zk!GlGH)H@FB+`F_esF|eS$ifH=n$oVJ`Y^_Q~An@-M?zG9euo6<5lsT6|DjoziIq5jHG08uJnocvCtUConJ0IW z{3z|zgO3|7RAl23BLLtg-{-`+6G-Y`yG;8NS9j~#k5@KgT`QEqatSx+vjU zV*dZ>Sx`sNeZ@A;`7wO!zllmVYxDsmzC3c2h}u%~9I6tU+9~cEk8~~#=Knhc@rZQr@-}2|8kqZXB45}7pw@} zDO`!f)U&oJc6{iR5@wA|upa|tDHv`HIO8j=Tf&WWzZU1Ts|uIPbkh>8jyegE8pG9q zUMlRdb9w#XXH4?UTeIVuk-(!wOzlb>^4;0B0ciBC|NZ4@>Jt>ANvxtt%F4|`(q>(; z`h~cw@v|P8(bPA^;`4I;O7=v{-SsMJW}8^XoyC~XVr={_nO^c+zl7ONn+(kHvVBAY zMHzJ~&xvA{N@W#ta;7ncwPWa2Vv>JpFpO00OhoWe@l2tu6Ft(O?{2Y&9~i%K6w(1u z@|bqxedbez4#bGBH<2j(Ju#19?O}Lp|AcbdZl9$c!6H%H?%K})un!Ap&#yq8t&Eld z>JfXc^MF9UTz5f2;8$sO;oVzrU+jf9AWE}v2*a_OUz3K3=hh+HtvrTqX~<{&F*Rsb zc#!PEZVOvtsAlny?gb>v4jnsN7T!#RjT+jX*AzUW#55M)*bCKn7|aDp*azBIt>t(U z-YFyiVXaInkXe_pP@KJF>&!t{OVK3koXIo0l z0>GBoy7Di=&!5ndcMQH~abrdeN{_+}N3X+<84Aj+a;&|(`0l0*P~SiS`D6X-=y)F^ zt_2PQ?_qHMgmND?OkRma-#P7_Y)LszVkWB~)`5tv&+9Vj0FKG}hHUSCw+u!_=I*}E z^`gP&%SA727m%6qjp%7K5*$M)s$S*AGKWl0%@p)qgvClK@mb$!!LckD7S(i!=MHCJ z<7cfxtj=r|)Kjp8g3n~@j_DtkzyPbQ(`k~*Z^shu3Ub|5&J1)TUD28?{1*@i+)}P* zpcjHh^tqSs5Z%d^GejwyM=Ct}X{Vhpyt$J|p2sA`Ol(1x0{6!bd{}84-erXHT=W!l zMmGK-S7>SE`~FJa(=%&7OHUg%S3ScG;um?<4`+RJn*SuXEqYfKkAhU^Qvd2oE4H0m z_nfz)^UJXFigM<}1CmjshSgT|o=$k5oJ)%RsBRVVaBgJVB$Q*OdoahvXVg!3P@Peu z%zW9<9Wk{8iRuiiteEp(sq8Gb%ui1kefF86&@^bJ{Ro=}@qXfUZZ|GBKfnfy?QGq6S%t*{gvYP{7 zESkG$ijG~0!g!eb!XD|*tbe+Vgs4Uml{aKIZn_TL%X~_Bl$(&2HR@MHwRPCN|UxxAXUo*SIhoN4ghPL6kJU$o5jPFMHm)cP5X7Y zSS?w6*NOtKq6ms}A#Uwa7x_lO#`~|TwFZX^ab;Ib&1%eLkQTV=Sa^a4OWziUX-fM6 z;SO zzdJmj(H|sx-8BcQEE_LqQD&-U;UMH7x}`*{bj)r*JW&?hf!K9=LsoyOt;&M;R6>cQ#?X$eSZz(rG*-vuIm!KB z>ci~(CX%(4@P!DF5hte>5N|AC9l|~y2Zdl+cxq#{p5yjwm`2%Lgp>e{cTcGdqFi)D zDC)sl7AVCa$60^OG})A526fjk2t##DQ=r6|h{AG?Q{<>@!*brnXX>J?UzAU;y)6@8H;=z8487(cOzyl(1AC)+n|ehC{r;4D|PfJSU=hNMBB5gs_pX z1y4iPD^pPU>FMes;@H@H-{b1I_PFFejF>r8op_x=r+1GKE6J~PazM|A$YzjC^1-Z$ zmGE8i;6?!9&Xgep6Cb_hm`c4A)kSTsc?Q2 ztYR~x{~6vqK}S7<6h~wVrIg@vyyL$#r2K_+Y;d6^1!>ov!qzc!p<^7H1ab%y=`0Hp zxrM4%nYnU&)N>wbkN;91y;od)H>u>#u0EtiFy#rra46pNP^3?;Da*m10 z#X7X$d^I-RjwDQOc>(tng#9HXNQ zy=HXBoVF?Te_*?y`1P+c8@!Wvygv-{!2$4BMNM&qDEOLg>+o)D(3z}ftR7pZAdhj# zBiOteguiUn2FaM*Ip2`Pf*@)}lY|f~pp3E&2VFpsn?gUnn=*7y4%b1&X~qq1m9>du};jcwJNljw2tq?_U5j2HLTz| zRwv3JP`(PmQ2)s368xXm7E-iGYP&RO=fm9#oUg8$6@(GOvp!}j<;MFEqiLhxQAmCG zs5$`E+h-qOr$b1DSot8dQNmwFb|F3R&nH1cmKt3yW8MX=+a~Plar2;QPv-BW-0;h| zIl%oTG}VigO?;{|tV14%Of3OI(3Z@$yZRu9jxZci%oQtMXChHim5%HF8fW%Ru$k^A zOiyGEuSD@A;JUK(#)_^R%2*bb1<9N^%X~pndz{dJ@BZMeE+Q;dvMJ{IJK-I8{nNAZtJE=ogvU&uZ%K2xjzEajQ)CGyymB!Z 
zdtU@zDiG?|3*U7I_nG=VJ^N>YG53uXNqXkQR-qpw5>=5TJ*w>3`veH-D(5DvL;?BW z=NfaaIddk53#(O$)!(hxm7gw0+bnWZ^M?X4<|fGm=X<$cP(N0GMb+z~%fJ2Og+J`p zbLx_#SdF26l27xv`exx7u+Nda!3e$< zV?Wc!Ip?TE;jr&{kb7I$MYfj=g*U^E7NRUZx$bMlD-G}Jne!W8`9otlm#<6z(+Bky zOCu5549;Gz`x>3Gv#)r^#$5jb{7C62+_G2J!zTs$$Qr&k= zDrEphoD_Bs_8z=wu8AB^eE^%{kxUatpA*LciPoiw!ka}r!Db%0vnOaamqQwLP+_!u zUj@tofeAD2CKo zH>?buu+(Qm3~s6z@6lmQ+JSFS*}h`j=?V1QM#$fxP{HJLtR>B5!j(>)!1m2?8-;la zpl$DyBd40O4cJTgVM**LOlxKq_2VJ@Rf{zzt^rH;-{_;UYy~U;c$v1=h^vnrSKm73 z(ED|vZ$MbF121yH70r8JyH0xAen9r)Wou|Y${7vnhwnWAid|P|o5XrGYj)8w710Vt zcU`HdH{yri-N8#7W|mOeoN~@30NUIYp?(q zcF-lf$I8yeK$Goj+rTP#eeZ1ulS*Nub9W<4hPwtZc^!-5O;-#0A`*LY@78`w2esyO zcKC;u;Tep?3KLu1bd}B8iIzqErq8A?!N2!C7Z1Q!jvQ>5<<;>RW6sUj&L<%T8}Uc; zhP#ag2@sdX726bVP+T8P;NZW5h>v%Fk6Dj5lUpa_SyEWDUeCa+_$r!F81G&0+#J-=Yue_8`mUEs_~-58LT zn>vdbLxYY4@e5e$GyA|RK|>seLQM&J;PSk*o-qYt9IKchFG6GKog;mYD7QCB-Vf@6 zxL>Zd@rwZiF(jtxRG-;Yf~p^>c;otw|7B5NKM^{?Y-q_@%t-}w zW0^n&yI~e{L_S|{A|&fGN9TbgP2ZgIAW*oE8msq}L2a-t;-t1EE7RSLx=?0l=20`rdBV!qVl(ALX8masZTysuEQ+zWl zazlc=_?w|nbjcK3eqZOO{Vz=uIZ$P)d_QO;k1UM31;bpg84n!RD~CG;!#7*nsxg~z zOxdJRy03plggMRXdC|(8eqkkbYFK!~s{-|$)x~6-H)d5*Yz`XpW**eUA+isR<hDpxB7lSazf9C5}r*$Jn z74Uv@>;LtWL$gQ#t>o_v!moF}yCPbJns7yPM^KC5Ec}3BuoY+wcWoglHWyW*f&)Ag zKTLQE{75Ya7S#OJRVyhUN9Jlg4V*LF)RnSxwrd32W=<9Clx_iJj}4EEJNjoJhaJC? z=l)yp^*xYs0p9Vz3t$+y-jp=CpWZLwYf&~W>jzwpS6iMOdj)O4NaBWt3J!G&HCm2bq$c_m z<*g{#ZPq?%3wR(hqHV&9D&MMqUeR@mH4U}IbFp&*>3jDtRzdO_Cf#1SPV){$(2AiB zt^#UecCJ`a-PSW`!V+=|lnS{*3()NYa^q>LT4V9~sa^WF(WXyi(F*7I9{U>RvrFz8 z3EnTBosiu~PHDY|l+*zPHnp6g{=xKU0@$gk>*n$V4oX4@i!K^W>uhE|_9V?Xk7jy0 zv8tqMz-vLsv=5rpgL`pqgE!_@t<LIZh?0ZQ3BOftvy|13Y{KoVqt|)+udGtY?&vT$*k~<9=pxS5; z2dA8{_s<<7u_>KeeubfP54L1eg4f_}u3)mtcb4E>w4Rh=5|g<9*e%|Im;_gqc@iWV~`|yD<&)mOQE6|A+UWGE=f=@`lLl zo9*ay`K%bk*qiZi7ol7g{hSb%hjZQ^gM4INkTW&rkdE)cVL`5p3CR^J7#L3ynOrt- zaMv0i>Pb0ouvJhh6Z(jirJheBN<{4@JhVHHoNW0JujEdKnWtBvGdNLlXWjf9j(!%+ zzCVj$IO;!xKFGFicq>pbS6XF#P!jihHu4lgDqD{qV=e8IrnLNgjQxjZu|MI_Fk%|| zU@+$6npCXy?sEDgCrH}6R{Rt-Y&r0o!53l%X;ZZPg>Ar(1;h6NTo2CV3dtGs`DL1e5+UjN$f3u7W|n%eZ^~xOJL7wm5br(E zWq|xSw5w<2XavG!0#=YTmxecZYF{JhHbqSOIXP4HP?uV|noB`!KbZ|rVMIpCD8NSb zwa4yIi#cs)NbHW9goF+f4tN1l#HsT9Bp-hHnh|gx8tab;2yc$lYoKL>$Yyxt^}EfYfHObeXUg_$^`K z!&}r)Cpn=zjC|Odetu^!ok|zV%4grb$gh_3U!dl};TY#`fe~n=tZ5rzDo&M+sBLlc-$)H#)EHnq2}T#4($fO^t=wfD%01 zD4@8Aqk{%esqxu<^|j1#4CaTaA^ChwX6bu+#=`4S$GsUmjLDj;lezd8ee^3spHdaC zktoAdpZEjPP?k*|Z%wdBM6@cxFH&e-b%ld4`h`|a+nLi!pp`*={TuD`c;oKORo>a# zRju|^|SpN~7tjF{!K z?SyG%{Pwi0--+80x;@buwRXA0(N+PLZ)&0Gz?z8y%3$U?8^<0TjQU6Q|@iqtZ zOE4Nf`RaSdjp=BBnST2KkS|$_-t?st)j~_h9<{0rZ~9}@e>d4y(fFXbcaD+xx@6YZ zDsyJlX5a;g1{E_Li;sc{(|ml72isxbs>tiAdwU=IteJi#*57FteYVl1SMc64jmzt< zljKvW%~az@HpbVMHEc3o6pUJZ}#Z$?^UjDjw#o zJPJvv$a;12EvLbmogo0K>9Clpkr;T`$FbPkelv({mD35HyUq2_3*u0HES3pYEQpfO zSk^6{-ngR$;GvMMsVPPh#ZgNaRc%bs_u>5L1=sbojtJzyp~)F&DN-xq0ZLU zz~IBz`C4f)&5NTVklwO>2&c>Eh_~*v_9+OU7+_qwgwKnhM)Te^Lg2zc%z^3l7hsC+ zh(&@D5bn#<(sImdal&YLv&sBacnbC+rAGZDgmpGK&zZ8X1S5y3EInC~i&7XfQ2Bmp z+hw2_HU`#^U)V?0rx){R~8bH>H$E!!(UFcyGX%N@)MLhO`<2|pa3?u6v`_hmbQuPCc{tqx>ch!O$cB7lD*kMZ#Q zhsd$D;M6@#N)`#bEI^M~X&(-A<6FQK7S2HVixHrNqpu6v>VjGTBIk^$nwSeJ9QzHI zFKZPu$KVvK-XnlB!zK;t|DU9{4{P#F^M_x+Kspgs(@epVUx8 zjEG1OL}1?EJBy+t@g zB9;-I`VsbqQgL!bJ!U@E-PTToc1@W%I+C6=T$mpT?xpI|q+(O>z#yX(%Hu?DwdcLZ zxRRk1wja+Qe*GGTa=)pnDH_OwEy$7`3rEeEAh(#e!eUJQQ}82o`A zg}$LSzWTlxR3T^8jpnTllf(z04d*vq;3o6vMSZJ7VbVU9;UwgZfQ(6a6uIuYEHnYAKaNBVmdu zTFY|@V?bytU*{IWMJap0%l4@-9+woXVGaZg5}gaL&8luC z-I&dHM-j6)S@ur`Jd9GeyriEcK8*gFDIsT9^Vt!QQwE8`m`9mlhZrVKMw&&}d??8q 
zmA>tU!I+eIyjLrlb#6|zqcVDgQS;hqj8hg$3T!!Umqh#t&t>8qz6Pm@<58@nn{#Go@?iGXrLQv6K1atS7fFmP&C()0 zZAyHMhi__r+{n$1#dZ9JqcSXXtGNQu%=;%}(A;ojR$~yJC{?d3@;BV?h(pT}*RR?S z?bomV>%XT#=>&Q@AZIq9`$w2rAHDv5lmc^H`q`~_*#v%(c(;#SO#_~l=3C8k!R{b{ zawo8{hYlZD9I(zC*_Bgf@$kROd04e>$OPy9L%lb5N0B_iiNS`MWpWc)F`+EN* z2Z%Guig$wtrqNxCu63awW#8=Omh=N533^}{M5pwHx0ibq+tClfwRwx|-iRqr_8h3b z*IZ&lK}Z`%bsR>q%F)MtPIe}zXrc5Jf3N4Fe&Qbe^tc@Jz&>q%_#V0z8ag%0{Df$C^&=AIGytS|OiL)d42it^1(92uuxfurf^2SPlxx)H6oARALo@ z8uDGUUxyq`frYfw<|(oNy^Spu0AAlJJX;78glMeTB9g$@$tb3b8&iPY{qrD5#sVP4 z8#S|gHnr8LDDX;F!?AlyC1erz8b&MkhCPeEe#h_f7elzIEIV&;O2gwy*#O3B#rB-X>i2oD^-pM7)Mu zcXh@W(g9cqzSLXn6b5Y85_O}AV%DX>32Q`CyJry)fm6}5jUCy`B^<;LFR9QMR&Bw8oMzh61}I`7M?&ex9Lrd|Sq`{CV?Fi>P zJ=b6YoqL=*vMb5z^J4Si{J8RW3@jL#)qT80>#MpIc(E*ooC})@ z%7UfD=O0jUV|s3UM<8+w1{rx=R4B4 z&&b=I`!;4W96c1aNwHRB=G^ZlVLzOQM6>r&kJ zpn(3+EArWzmcGnm1bTW0*7D@WUyu>T#G$|*Ms*oC{h*uzQYO8fd+O8npI~pjp zfWZ8&rZ8e0ywnydv$+Kq*#_Q9(=AgOnBokl)t)y~h8ZTByc6Ie6m#||-C#6^>`yX$Z zfVoGy1=je7;J+@%%|SKPW}+979Krg6ZY+ORNkPMT*z99ADJh@r5_anGAtCZHICde> zabYhMb(^2&t20m`45H>DT(MnSfKKue%AH{V)4@M*U?sikErb)X!w^+VI8{f&L52t) zz%nQ~9;eVJ45Q`N*Dbyr_7#jXg3YP}sfjX{LFr#!5~W|qw*{7Q$uuUCsW#eJgBZaQzPO-ElCm(rN01aE4S4_$MA;M;9VD<358B5 z`lt;S)7aJpXCp$e?jXJF&gEVfI;SxtQx_qPfPNi%EBu^KNMm6nU?z3t{bWkLj#v85 zbL%1oQi0Zd)T$jv_jryMLV%!h?4WuZZcbN(M>B_rRPg0u2>0GyjC9NE%{HDF$oyoF zC#jW|fgP9n(3=iNYgL&Yxc&pz@=G75piD^>?5d5?t*~|4p65D(7DDu*Ul@(HYd~(f zVq4$Phazik*z0Km*tfOyhg(J;B?P8DC?Cbm&Xk+1X%lcsTd71&q!%4l_6V$s9+gdj zf+1g8?0B0e?0y$bdlYloGC^!3f+c%6uJ_`pM$TEQMQcX?L0o6suv7#pT~J1ORoDTt z3KAa}bLDPi01wE;-x~|mIFl14vnt>|YLZjQWmB2aALxUoiu<>hxuLETnvzz}Ca49p zULrKe(#%$&fJIuU{Yr<77Y9b9A}$)2iNdQV6U@_l#zA*SW^+&jIOrSWLQcjztPf!{5WAYQ zF@xNtd2L$bCv#3NyMrm}A}17|s#BW(vrs!I8^Vxfnb=zMTZ^Se&;Ah}5LLLF{A7a; zQ8TA78EqIR!#tLigKuGRMHBxh(aMVbqJ5?tM7lRTH8BPGf1o5iUs{;Z5H0d^~Z1yRSxm0Y5;#5U(nEa0-$jjgF+H@OixXG&_~B`^eBy;s)4 zDB==e33DgCGVT+?A9Rl$pX`7hlPm!hbHdZ!3s!41;6c{(7Dqc=uh=?YTuQS+G7q#X z3PMUo;*#@xI2&RJ2v~*{n@BidCd4*l(H4_ER$uH=Q~(4Dv2+!~=G}HPl~k13ynsZ_ zx;jm5$_pq|n1jO+;JCd<;7`_F7Z0o)Z5PPw$}+*m3{*U8Di$XvW9lF`aGVp)07fOb zklw&&xd{*ngw*P>nt%^r6qP;JjOU$lSGF<=WDDHlH0lgJ z*_QP#f{3A3kPgq-X|NdYN}Cov$fNxDnOzY|68GEEA%0&U779ApP1L1;EJuAjhPGaq zpHd(L6lyjk`9P!Kj2#DbSPq@IyJ>ZX-zRQ`#+%{zNGE-av79mBKIxBdpEWnJ4d(y^ zHhItp!xx80Za|OTTE+_;H)>pFyA%gMK7nTML;->LJNYU$1?8!+moX@Wd2tJ-MmENj zkFl}smp!+d83r^bNKzUiOz%n>W?u1)rrQKG%HFv|e+}-BZ0Q`lI}~-mcK;6SJtOF8 zDFVn<=XOrul_>Tq>8OfqS?t|B+9Jx(a69&NO$7ZYiCa40u1$}g9gf7@vtCq4g6A>l z>+yiPwb<_KXh}4TBEUWO>@_av{c-7ut~{*vm_TT8*N3g|ptmlqm6!X4cK<3|PVs$% z?JtrSr;DtNzYfL?e9-JRi~!sSnulGuH(f+nVX|TT9V?6SG>a+&_1JlEoLl0mJ4b$9 zz|ka4(h70NtmGQn2P7-KCT_>EZbDvVV|^BvF+$?^fB$8y&k8qgnrxvy%+^8H!iW5D zV<3O_Rz16=yPxoRe91)~Ss4GCI)F9?)b20^cCE|~B$zYLP^L1BB*90%Yq>6=cms=V ztQy&jw4Vrxx78gF!Mu{VOqZ=i)36kcE=D@IJlEh%t0i2&vH>@yztC*H;xn?n3>GF9 zHL!$zS(^Lzjtc;KzoWP?doshKbPQqOezqIha4NnDUQdB_aB0Y+9ygq@khj?F>4VoJaJvh^tJh>DgozChw*yXi#lp5=ceEa!@fG!aOsruGP_R~? 
zvsR6ru1VdYNi3rm-+(#P20yP964H^9r2l1PvSh%9%sJrM|SJ36dGpTEWE zrahu`Bn5tE`+tCh=J)Q!sIA8AGpI>Uc{*4cmgaA-rLtpfxc{l1Sl?#lAc8H3XGn-{ zsl@l(#>eGxTzvqj3FoIei1{q|zhQ=lCrF)VFu%@oVkJc&RK`4jZ%smqt#sOx#<(q7 zFpmqen0#QctMC#F-{q2R{#x*SFT0Bi6nbuyWs)H-cQa5%AsfG4~}D>&w3PD zyMb__+e(g^vY@$>PM33%6_GfKdyrbJfY5+?R3Sysy1(KDOrz>)b%{GgBL+0Ht^+H)sMrFfu$k2o$`gw?^l5)AeXO?#ei#fI?5cHW{C zzp8gO5p=gXRpu3}%`~kTXF}J1*S3H;``M3!)}uJ5p1Hjfxbn2d^rP}rzXe|#fO3f8 z&nfMJt7tAfN?Myf^`WUJnQ*|K!}k_Bv_lHB8$x8WxDefne%M;GX=6c*;22Q@9+mZr z3<>~+14HQQNQ~|z!YsDy{Zt_o7eA?^)1~S_F+9dY=oHv;%VQ;UV5QgGG@;vujWL;n z>RJrqc9?^y(9I{ZBY?OI)|+#mf@Z^~3uNU|3RcW3zIF!;ElEq&Tait@%n;mtKG*q3 zl>oAW-5dChcd$FFZ1#(E;tPu@5`Pv(Q=~k}1uK5`Qv~KV!0Ruk@Ozhrit^kdv$;H1A(!4x8iTGW%mFve~vpsFG-VBAYbTI1uN zB%=IDH!Y7|HdWAbET#wBD-J0j0mQ~PcXXyTfx5X>CJPBIUu^&2ZK3tgP8iP|fQ5*? zR4!KAn;MhgQa~P+N?}o$`%o&y&!fP3AEi0{5GG81IUCRV$|)2 z05sHPUci4k;Js%$ij@PY%=N0E7Qm77;;_kRv5hgUK7baZRZlLo_p+vq>=Qz$F{yfv zsQCmCjUCaiVzI+6V}{@lO|w;FO~xF^YYKKpo}^eM&UV|>>OWR1*UvdGxu05%;AAe$T2+uGWcHMeZ_9-C$CI`=k(s4|Xz90+|(dmVz-Ll08p8`8`n+$L+ z88y8CJMkl=%wEEvBcB@6pcA)^w|-~LGUKTgDurtsgfD=a6l{>V$FTOLqd-YK3|0_j zfWk31cD#BL-*(HDsq9m0;WqHXO!;ac<`0{&K5r-f$D#OPN8Ez?h+7-`XRN)dy1mI# z_c^k?AkrJ}!qBq}e)7(E#J9A(pg&ffZjTa5OO#*5H)vY8)_Ic*Hs%~CO74tcG~Hw3 zNZS{!jAp|)zpo8W?{G#HO|UQX`^hVl4Sg_U`vULC;jtRDKUF)r*cI3TYn8xzy(Z2f zn&7<%>LLss0_{|1JE6TRegy|)=3o>NZ9*%x-_RIFg|>c#`R)V|Na5NHVta1jrte;h zOlmy>Fho%{yBFXBDw}m1Ni7rAG%c%ofTKCGli4`A)9jv4m^}*Ykmm<%*8*?_x$d_n zGy0G=&gy;8Vkysg4q0CkecNkLbRCEG&10ALT4#^1-3PBWMoh5{dV8kyc~qOvgHbLY z(%x%#aV^Lk7i_VsXx1iDD;A*r4pV6G+I&jP>=#_-cq683?Re|QC-=E7xs@BM#G2ap zK5S7{l*}Pd*O>tf{)oYCu&>wQMo%m(^UbNS;v|dQo&)5aQZ5szrC`TC_t`wdcw|oT zDI1@*tIgaMBhV%)3N{da$cs^YPz-}-DRre?|JrR{+wkl3>)#>8!Aex1K)f=Nz1>6u z2^w#`4q@@EXm;JZ7=mc$*=rCYgfOI+65MXasIQr zpc%wVBnzkArV~RX#-!<$&JG37l;ZeG`N87vnk`D2w>bHyUGykmThNPv;5UFzezTggGHKSfyI^z z)6#zi#$MH>i-EIJ9_3H^Ct*Y=;~pWjH)MvJHi~id{0#0V#9nAiEu}(5n7ag>4Fmt~ zTrxFzM>pc<@8Yk_C!{cBa{R?r2cUEZ6li&kRnot`;sVOoup)nVVVV1uo|M|;D`@&> z5rft@jImaHeO4(&8BCn@4o9d?;etz?32dMBET=No-V1i|ycWEu_qs%y)ovVxqW+z- zK{5W+_f217^(r1~>Ws&Jw=qyM0l$Y(I^rXseDv}PjRBD9`P%f#EBO86>b%HRz!Q#n zj!aB@sQ{LswX~@qL%P`J=8^{r8NUr6J*$0i5rLEIzuN-pwbXI&NOR!^01j^+?~tH{ zaPhcbfQS;?#axD7TgnC6%MN@_6N|lz?H*bPq8U4`M?s4Dz!Z5;&n}EEf>$-O14Ixf zIFeUOaca0R^20L!8DCk)PA^`h$L`;got`q9gd{YR# z%q)jT1J_K$KMQ-uAz;!S9fl2U+)h}%Y$6t#u*1o~T*VG-7gpKz8@v;#RU3)aubR&O zWrqo$VhCeY{8iMIE3Z7QXM;{_@*P8}i^fySkxo}&k))PL?i~jh<9RVUh!_5b$;Rh# zNzXeZ>Vy;8p}<*{unNNR>7|u4Mrza*%HGUtKiaC%r*imr7yawu`?4YQ zmr^_D+$(0}`v5i5R6C%ElKY_rv8Xb?GhLCuHH4)Z5ok}Up?DkBy-Z&)7h&}Uw=j{yMDygPT4Hz<+~$j6AClR8_Oxw0Xz3jLO_3IRp~jY_!6jOxx4QBnv#F0 zXX3s)Uh=OzJ39{?*4OWMB<}+rGt3Zwx@mjiZcP((q6R314A`cdfBUbk$?--#UO1@6 zjyEPSz?V@h4DE=SH=qlb)W8^WKKXzDPsv%Cgej>dM`Y4|KG+0|f30EkE=7@m;de2V zm2Z3J$IR|%{OTr++2ZT|G_lvpln6Hj!_|iL2}J*pY(Z|+c^DU~@gKJyqTC&I$+Tl9 zhz(T-K(RE}D$1B#@^d!7_g0ypep8eWn9c)zLVT~Iod`?01%QwIt7*Jc+}d$SjeppA z@cIp-xydZTqTE~`PJoww8geS`m@aUxE(Nn)NF!~32<@to;X&313iD)ed3r9*5E>Kf z2qR`lI9*}PgK|lO$uf@H^nfO4@Ub~R=gr1FwreBgt zCrj-pwU%3=;gA4>{~zfuungXA1!_90r6rsL#8Qm#8}tO1B_dzTE)c_pcg3V*6)~(( zos)djsl&9=zoQIMNklRoMr+o}#}M1T2KF$#GJVIOBV_xs;H=Y;%sE)4BN1|S2gZ{W zS-+$bi?>Jb1qC zf|V(su9dk$5z@`Pv^+Xt!b@M50{I>q!cw^wkrHYEBB5+466(Ig?q)e{oL(9?c^up= zlT2JIQuoRIjY*HnK|(TE-WA5rRJFiZC8V8F%#gJ_2e@@q{2lRuG^4Q*LuT}d2G~~g z$&9RmBp$|1_+H9nUb-&dDRKf=`vMA5Ms>r$piSoH;Ww9og1cSMEmR;PfH<#ghvMdp zgOz?1f|ruOf0<^iddTa#Y}`(X?gd+Y3Ty>7mV7#wSWYkOG$u<|Id&mIznC?e!d3CZ z8_4#=C-Hb!cO+u5Sg4Y{+0+&J&hB_t*O+v5rWW;GT|uU7${nw^0|ce|=pp zYJR`uw+~~!`t0&L@VG?&BMo?vDAF46Wt*5^S75*J>_1UBv^wCK%w^LiYF11 z(#N<9c`MT(*1S-8u({F~dy{Y43zdrQF!BM<{bYVzkY+GgcGH=w5m)}oG+pyC>{OaF 
z6O(qMBKE{-A8d{Cr5oTQyuue{prpBMLb zs2)wTc=du z=KIG$2k<|4)>~!}`|9_%!|Sq@^m4#NGT%eGmXIR%d>d??DyV%&VK(U&odyyOXm(+u z;SScR8WWW*P&sB+AHyxFDE_E`yUn7YeOD zFO~Bp`X{Z$Ej)t)!4x%T-^SjlcWkN^3TMg34NY_R0Dv$TLawv z6Ug(y;2M`MVa6(pg0k_w?WL%6+Pwx4HnoleY3A%GVsXjSobcC@x$4@3Y293A7&46l zooUGxlp3b<{Q`XMaCgY;DwuuL71FOe1+VARZet1lQHP<)1_(_enkeZfJMY2` z7WJb5V?EtMD+}@y9ru{AKEi0KP8gq2MUj@XWi*qwC~8X9D;V+nlXQ?5a$znBuV{E2 z%n`@7k%QlxDh6m~My>=?kV@9SQ+S5V-jCAWKBWK{91H_VBn99AmN^dv^wOV!4ZC?} zNXJE9C|IEk-_!%s3+(5&${rrSZQMqP6VOcpb&hPZCsbNX+JvPK^=#W2XZy~}JS&+- zfY=S=W+`3@rOekMkuc4;UHP(Jm}TmKA8jsnJ+txU;8AtH>#$8V=>TlkqX4tNhIf^G4V7F|KWna>BnBU%Nb44FE_?Khu!LYy;9cB4^rX- z684>JZ=%*H!0~CR7B!m@piOKBBqmOou7EbU>H6uI#UU0LaWFYkhbX`|ahq{&khe^> zLc}WWxUgtl=pDcbZoA;Rbn0PKCzkoGaM*2e1X%{mmZj2Ov|#pvJo*4my;^aW9p8uU049w8VZ>n# z2sV%GmI)98_A+gfkIAPPs?AhCUpt+xzN=t&n8Kf(1fG@4*UleS0`txN*^La?FaSNP z;YrSHM4vt?wfdFXE3oT3R?7~=r4hZ@PbpBA1#Z}D>e-RFfW-%5ppXy__yC6htx2hv zk;DRze{h^tjFDBxgnx^S{>+mXnZZXq=qbFe}kY>j{FEEw1&-P-9 z&X&QF1Q5=CkmO7gC!F!)XNRyAj6yy;^ODp#c}^(9lq>8TST8EEKh8$=^dT@+A=|EJ zeja7%I1MgL;eDrRf+i(g^R=K`d8kJ+mjZpXB0mvO5-1jtxNAGr-|K^lZI|8&Bf+EV zxD0r+$?5a9DJU*Jn z9T8?rSJ20IRRTB>|HBhGGjOhUkHrE!%cFdt_e*R?!Q5*ptP?^J|q@%^y> zZDd=t={oq1Dff>rp4P`4#o*sFon|}=u^Iv0T{3~ku;lKzQI`Umd4h`PadZoNP=i`0 zZy2=$Q>h63TuH0ja~}<5$fXPIGi5vq3lVlSWAa|}BE2&G;>G3Z;yzFEE_J90va!ZI z!8|sxOo(}SkIGSfXE@}f;uyBf@ng+dTFWs%eb94OYW$1J%(xcvmT{KnHFZNC)hYZI zW4D6}@$9{mqge}MMD%|D(tYE2V~3dm%y$>{X1#BA@{$e>SFaqo6Fakz)klA1jbp6& z*Ae0{*&=OPs7_IajHIS8gvW$ojO5wg(A=Bu7pXGIMH1Dj9!YKp5RbM*0cghPBz3^U}Mb{DE zx8D*Y3L_Hafj1wsBv6%MjKo1Hw$YYnsU9tSWkhEK7U=dtvTFie=wmO}8Fi6;JGRa|!{wNQ<5Lrp5*F|s3S0Zk@> z{lfrA){4Ve*=3V-wiC(IQ4@pS;mgO$8z7n>AO9GvnaP*Us3{EA4Nd+0KGshcQ28!p z$J;@q7G6lMU{J8Q@y$6>y8cXWVR(NhL8FuRu@o2SIQHi7BmZWJp)E36zcYdA8b!8@ zMSUR{Yz3T(5J9#gjE0eoT_B4G3q02AVoGlLs6i0>Nq)Mia%7eEZAC12;puco#iM{04P z>W2OohNE0gHaNF{g?e;!nE16jzP6?HW>VR8Wo{Dx(drq9fQ?6o0BVT8RXspj&MTAM z+p%MmWBrLMXD5k&{Rd~*q4A?UPY z$9q7w$bD*CU%^_%+G=mQkFn-qa}G3+ISb~J2c|?1O$gj)9<8gQ3FT@B_fJnsaT`e_ zvD2w#j)OYl_w!#uI{oyey(~8U+40||d&m_P#icgukwt%JMze{W3q~znQa`sjC zllfj55<)ig-X5^#657b9Q?_Ueh3gn$7nDUYs{3eQEo1tVxL>oi)rl#kgM3^Zj_*Nw z+OP3~SH;P^Y?Ilmwkj#ACpq(&MT+)vT2O7&ju{-^M+ec#d3AfxiDfi$G81QSG#iFB z#_aQXjNOdjtt&b=W(Gw(po#&rq4^$5oS{|KZ7f_nQR zZDIj4fU7>xeZznh0{y>I6!+*HtmsnoaP-nWE46M|Et6{!Di6DQ0MhGAys9`_wK&A{ z_F2h}lt{ze-mWQ8qNo|Z53tga*=c#wQPx|t*K^M*@~!pw%B0(ydUPaHy?inP9iu>i z$xCtvqmZ+EdmMo1qeJ1|0g&>fQ?F7NY)_%j67R>Re;qTLNhMG7z~Xv?BO|EpVDcXw z8~;!;va5^SUuaV8H@eXK=!yq?w%e$OJWP`vfM49E%4TsVc1iI=7-|+}U_QZktlZ=) z8lYd{S%jOTxNvmu6ap!Zc@~|@ff_-u)zC0j57qyeB>^xs$=O;PXpcZA0kO0l_@tdW z7e<}JTgTQJVX_5$Kd`m4I|j$#{wX@)CSvz3W&^!En z3-^x4Ud7V1yqKnPS8@S7J(k9`Fdqi%%s6XYYdxn2%cw(%F!;Mw?c@PC+}e{;aaP-yLSdxp^MG$Jq2Z{kr_tDa#)k^UGH*3!Q$O=j z09n;WHiUnlI+irEv^0rg8kW-wQ~_EXM|NA*Vw>FZBhYTm;+ypr%$1>{uEKzqcfn84 zz{A62#^O)$?hZ{g%-%>6TB1+L|GI$aI2Ql*_lTaQ)4>0bzKl5O#ss#fx4jWa~wov#a<@T&C`q>C>C>o}pWq3KCaN z{O9ez_BwdrDlJV_Mpcz7w1ZY+o*!W|Y%%fEViXkZ1W0LLD85a_7EpiXxidM!5&+0# z(qlldVNQ1IIU;oYP^;XP09Y@r$sXw$3Lc<9mY;m$9!W2m!+?>P5P|tNdN)M z|3#epD!C&m6sk^#*%Yn=!f9oc2}H^vmdgzH5XK;)Q9fR)7PZU}cj(kuL8QH^ols|a zbAA0ghfF8H=6ReX{%l973x#Z7)A;z3ex@{co1b~HrfdPu>MlHKk`ymTup56^LT&~Qz;l%}W zfhu#N<(q8U0Kgups@w#PP)U?1dI3C{*Up!&oQ%oAE=u5%Ym=Z#oX%PqtNP7|-}F(w zgUeRDOLinz+ZFx6z7Vu%L&9j#54l@0qOlfE?v}w>c0=geld`r@=ooetgdN^WXU>vW zrO@LK0eU*3jSocy2lRZT*=C+cWgO?`noi(ye?Ze2bJi-t0mZq#22&K@_^))^FtVjI zm(VrlVCQy&ZwgjD4gG?3?6n8l7oqMZvCre`+k}yD?#XT!%qyv%lJ3)?^0sSbXs(Ko zh3fZwrqpL$faB#pi*7*$Y&lK2vBDA6T5ZJ!x@m92KU|WGoc+q--C8KS#~`;5Y^1l& znHABaw{iW$J7t7i|9>~f^WYc_(74?}VRZR*T}JuXMsjet2{A3NI3V(BvRAx7WA-7< 
zZWqa{7T{RE_Cda2gkdi~sQ?L``LoQwx(;$w-O>=)uxojY#FbGcb$K<zzKQH^dC59TI1MON6fjKe&{ zq-{to)PaA7LdAGnvMI^L-i_hZ)y?k%s^e57ZEv=e!5RdEi;64$pAnK6 zYOdsFmEf?8|CV2oCDU(kZv$%+; zKWFm+fwC$pyHLAdnH{ExXd+v3Dim^n!>-PA%$N$Lbpun@Cv}jq>}%K%?^Kk+!X#aW zWBm9iR`?mBE00Pc5&1i`{ED){DDrQuKMhyVRphlx8Mti?bjKe$m0FJ3YnWoz?yoNl z1Pu#XsI`2V>ptd!&O@(33ZE`zZBxc=!5&VCRCk^g%q$y1I_VfHVI6)qAkl%+wf9JP z$bUR+GKz5MmbDEDmH(k<$4kZSLhyd2wy#knDO1nqJb2A$Ay~&*krW9gL($_E1SX+wd3L& zo`ERRwq>#h=|7=`ST+)XK600Yk05|juC12rt0=YM0m&5$i}E3X2ErVcUI|2RXKb@t;3jAO`rQMOJ{K?dP_+||Fk|B0S<<{U%0D0OdN&-r# zjYpTumo6^04fcU$Q7!^?QZ|k481-?nV06H)dI-vplPt}t_ALzH4s%iH!nAH>@t{#@{kZgL*Q#peGG>t2RJZ&!FC#}(Z( zXdsX4H47WbzWLQNb0A#3vZnA*l1|jwp`9Vr-$|#ErqfLBtx-t<(6VIAxO3sd6uzYA z2$C&#od;=OQSG=$orjW(bX-fQ2vCIEJSdjTrEfks{^bBUuD>`k)QZ3IefC zCQn}!J8{f|EpgG&D?nA4&lc{ij2es<14h54skQvb8c6C87Pqg#@l8>pn$g}EEVjYt);>YO}AIMl(0Zi&0AMa+$sJw@?8HsynXWDJ3a2$dX{t$1t z99=r8xFedlDkzD9{d%9N~zecjt)r&+KGMjm-pi{$A@N> zI70Bk6~5!{%oqt|y{*ui;WhzY!pO_f@ku{DS70%89#E>@>)t?Ef*?V}qwx5c%d_|N z94MN#7JGw`}BP!9NMeLNXz@4eRd)hlCUi$3Rg&e6NKa znS4|D&2!MHck=#e+?zBG=d52>8$~ugE;ToJk);54^by8F)a&6=kT6rO1AHJq4J*on z#&SE~S5k-~I3w-r4XMinveM9CTscVvL-&5d zG_?9)vsyigmqNVr?rbXt{hs{q@648W7hr)wPoUl4iiT$(*nn!X)a#{mW zd@5)P5nZo;E7no0V&@J0mcM438!NCrxd3Vf@5Nu`#s#G?j}d8!t_YJztm4lI(=dYI z>Z7nx@vA%X!g=uxlpSr2%!XJX!Y2!q7jPtNwY!DEKz z)yiyL|2og{aVo=$UXV6KqrPZ^M?(#JI6MPmZ{n>f&ZkH+EBUAH8_YInM(~Did`E@x>aYfUaP$u!#Mi>y>l$GOV zg0~8u9472sew(5kWQQ)Z8|CoE@k>zd?ZnuqK9B}fte~oeRWfIpAT0~EnT1KfK6B5k zhS3y6Gh*3m;BwgjfVX>w5ujMc>$G9^yEf&h4y8^a9O}iuGN7QF_?TM-%*{fSuQ;h{S`|IFTw{gh@p1G=_f|DLnb?}{iE?9hM z@-G#B105ntVt~SNWP;eF_}`;bv>hAs0>t$(BY{joHJ*=odi>MhT5$0kJL61d7f`q^ z(^Z0s(zY4JHy`L3_6XlV`(TLVGQtBK=Jcn58M)LMD*`rL%(3d;!D)6EX({&sL3YZS zPJk~J_m-dAC@Q4m0OCVT6Ps1Sd8}(7^gy7JGvx|c%Hn_TJR-H4-RHF^q54vV;;iP` z-$_>hEtHW+grL%{{%1iXaOfRn)4zoG{@t>;&oR?2%B29Ej7`>lY&(8?%`b$ zAX+{mo6?6d%3N?d&^7^8HkCaJbBc>NfRK(!PcHyq+ZhiGVEj*nDbJR85+j!}@a4G9 ziM~KnbWMXLb)s~=ndq0EU3BOJAVjD<`7Okch zsElFH667J#T?h-r$4e4cHPdiPqU z@snB8&o)r-J0ol#Ch9%oE9sVHT;WlzAT)6VqI^JY1&djU@K-w$y^;%{3Z8mqK=+*& zC170rq!Qt7T;iTU*YZyi+M~v0GN)?{W&=jAkH?_Oq*AW=a%lYMo}t$cX=AH;7*>89 zyi1rKy=nG za^m~3isG;3<28U2cJlrKpzWLv?uYsy4R#1%W@5|oF81}7W=$RB<4k{3l6UY4Od?i2 zXli(lR9H2@TaO>Sk@t%ac$-z(s6w6bH;&^2sL!^~>b1)ttyG*JF{@&H!4@8?ip9a?H7L$qWY# z%wLL6kkQDmwl+?T+vNV2E2#w)pU~sp@-skbp@o%QBI%Pn26>Kr15o;?%q#ujp_LdX zqU+;Y_@S!~xDVWMMWKB{1lpu5$_%$GrUV3-9n@q9bV*e|C;WbC%7PC3_u1oDV0i